[ 453.915733] env[62585]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62585) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 453.915998] env[62585]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62585) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 453.916163] env[62585]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62585) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 453.916515] env[62585]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 454.010362] env[62585]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62585) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 454.020567] env[62585]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=62585) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 454.621999] env[62585]: INFO nova.virt.driver [None req-82cc5613-c527-40ba-b270-e8de51efd3b0 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 454.692322] env[62585]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 454.692507] env[62585]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 454.692580] env[62585]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62585) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 457.802480] env[62585]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-4836d4b4-1b2b-4e7f-81c9-dd6a58ba6185 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 457.817719] env[62585]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62585) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 457.817839] env[62585]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-0b7e49a4-9f3f-46c9-b1c6-9091f72f07aa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 457.849534] env[62585]: INFO oslo_vmware.api [-] Successfully established new session; session ID is d68ef.
[ 457.849655] env[62585]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.157s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 457.850205] env[62585]: INFO nova.virt.vmwareapi.driver [None req-82cc5613-c527-40ba-b270-e8de51efd3b0 None None] VMware vCenter version: 7.0.3
[ 457.853573] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c9d823-5459-4978-bcab-62b5633420c5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 457.874794] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645e6a82-1edb-4e03-8675-c765274a6044 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 457.880698] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62fde51-9fa4-448c-8701-b25e7dd87d1e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 457.887060] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6d3716-bf55-47c1-96a4-ad75788b93cc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 457.899568] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339c7e21-409d-4c88-af6d-629d0c4c6452 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 457.905362] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52d5074-448a-4801-a05c-98d54ddc0753 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 457.934139] env[62585]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-db82dd34-4fe4-41b1-8d07-0a1e586c86a2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 457.939153] env[62585]: DEBUG nova.virt.vmwareapi.driver [None req-82cc5613-c527-40ba-b270-e8de51efd3b0 None None] Extension org.openstack.compute already exists. {{(pid=62585) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 457.941780] env[62585]: INFO nova.compute.provider_config [None req-82cc5613-c527-40ba-b270-e8de51efd3b0 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 458.444685] env[62585]: DEBUG nova.context [None req-82cc5613-c527-40ba-b270-e8de51efd3b0 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),24163fb6-0d84-4982-a1f6-5fd07407376a(cell1) {{(pid=62585) load_cells /opt/stack/nova/nova/context.py:464}}
[ 458.446799] env[62585]: DEBUG oslo_concurrency.lockutils [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 458.447021] env[62585]: DEBUG oslo_concurrency.lockutils [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 458.447682] env[62585]: DEBUG oslo_concurrency.lockutils [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 458.448125] env[62585]: DEBUG oslo_concurrency.lockutils [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Acquiring lock "24163fb6-0d84-4982-a1f6-5fd07407376a" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 458.448318] env[62585]: DEBUG oslo_concurrency.lockutils [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Lock "24163fb6-0d84-4982-a1f6-5fd07407376a" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 458.449318] env[62585]: DEBUG oslo_concurrency.lockutils [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Lock "24163fb6-0d84-4982-a1f6-5fd07407376a" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 458.469128] env[62585]: INFO dbcounter [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Registered counter for database nova_cell0
[ 458.477082] env[62585]: INFO dbcounter [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Registered counter for database nova_cell1
[ 458.480289] env[62585]: DEBUG oslo_db.sqlalchemy.engines [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62585) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 458.480891] env[62585]: DEBUG oslo_db.sqlalchemy.engines [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62585) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 458.485836] env[62585]: ERROR nova.db.main.api [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 458.485836] env[62585]: result = function(*args, **kwargs)
[ 458.485836] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 458.485836] env[62585]: return func(*args, **kwargs)
[ 458.485836] env[62585]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 458.485836] env[62585]: result = fn(*args, **kwargs)
[ 458.485836] env[62585]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 458.485836] env[62585]: return f(*args, **kwargs)
[ 458.485836] env[62585]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 458.485836] env[62585]: return db.service_get_minimum_version(context, binaries)
[ 458.485836] env[62585]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 458.485836] env[62585]: _check_db_access()
[ 458.485836] env[62585]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 458.485836] env[62585]: stacktrace = ''.join(traceback.format_stack())
[ 458.485836] env[62585]:
[ 458.486637] env[62585]: ERROR nova.db.main.api [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 458.486637] env[62585]: result = function(*args, **kwargs)
[ 458.486637] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 458.486637] env[62585]: return func(*args, **kwargs)
[ 458.486637] env[62585]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 458.486637] env[62585]: result = fn(*args, **kwargs)
[ 458.486637] env[62585]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 458.486637] env[62585]: return f(*args, **kwargs)
[ 458.486637] env[62585]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 458.486637] env[62585]: return db.service_get_minimum_version(context, binaries)
[ 458.486637] env[62585]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 458.486637] env[62585]: _check_db_access()
[ 458.486637] env[62585]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 458.486637] env[62585]: stacktrace = ''.join(traceback.format_stack())
[ 458.486637] env[62585]:
[ 458.487038] env[62585]: WARNING nova.objects.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Failed to get minimum service version for cell 24163fb6-0d84-4982-a1f6-5fd07407376a
[ 458.487167] env[62585]: WARNING nova.objects.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 458.487584] env[62585]: DEBUG oslo_concurrency.lockutils [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Acquiring lock "singleton_lock" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 458.487741] env[62585]: DEBUG oslo_concurrency.lockutils [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Acquired lock "singleton_lock" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
458.487983] env[62585]: DEBUG oslo_concurrency.lockutils [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Releasing lock "singleton_lock" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 458.488337] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Full set of CONF: {{(pid=62585) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 458.488486] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ******************************************************************************** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 458.488611] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Configuration options gathered from: {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 458.488763] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}} [ 458.488955] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 458.489100] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ================================================================================ {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}} [ 458.489318] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] allow_resize_to_same_host = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.489486] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] arq_binding_timeout = 300 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.489617] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] backdoor_port = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.489743] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] backdoor_socket = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.489905] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] block_device_allocate_retries = 60 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.490080] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] block_device_allocate_retries_interval = 3 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.490255] env[62585]: DEBUG 
oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cert = self.pem {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.490420] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.490622] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] compute_monitors = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.490817] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] config_dir = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.491019] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] config_drive_format = iso9660 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.491165] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.491337] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] config_source = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.491505] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] console_host = devstack {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.491668] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] control_exchange = nova {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.491826] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cpu_allocation_ratio = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.491986] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] daemon = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.492168] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] debug = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.492329] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] default_access_ip_network_name = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.492493] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] default_availability_zone = nova {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.492650] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] default_ephemeral_format = 
None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.492836] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] default_green_pool_size = 1000 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.493095] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.493267] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] default_schedule_zone = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.493423] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] disk_allocation_ratio = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.493583] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] enable_new_services = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.493763] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] enabled_apis = ['osapi_compute'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.493926] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] enabled_ssl_apis = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.494130] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] flat_injected = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.494304] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] force_config_drive = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.494539] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] force_raw_images = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.494725] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] graceful_shutdown_timeout = 5 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.494898] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] heal_instance_info_cache_interval = 60 {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.495140] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] host = cpu-1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.495331] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.495495] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] initial_disk_allocation_ratio = 1.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.495655] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] initial_ram_allocation_ratio = 1.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.495861] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.496039] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] instance_build_timeout = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.496262] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] instance_delete_interval = 300 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.496380] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] instance_format = [instance: %(uuid)s] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.496545] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] instance_name_template = instance-%08x {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.496707] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] instance_usage_audit = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.496879] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] instance_usage_audit_period = month {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.497059] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.497227] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] instances_path = /opt/stack/data/nova/instances {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.497394] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] internal_service_availability_zone = internal {{(pid=62585) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.497548] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] key = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.497705] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] live_migration_retry_count = 30 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.497870] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] log_color = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.498055] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] log_config_append = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.498232] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.498395] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] log_dir = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.498551] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] log_file = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.498684] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] log_options = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.498840] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] log_rotate_interval = 1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.499037] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] log_rotate_interval_type = days {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.499210] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] log_rotation_type = none {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.499355] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.499481] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.499648] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.499811] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.499938] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.500116] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] long_rpc_timeout = 1800 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.500281] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] max_concurrent_builds = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.500441] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] max_concurrent_live_migrations = 1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.500599] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] max_concurrent_snapshots = 5 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.500757] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] max_local_block_devices = 3 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.500913] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] max_logfile_count = 30 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.501079] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] max_logfile_size_mb = 200 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.501242] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] maximum_instance_delete_attempts = 5 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.501412] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] metadata_listen = 0.0.0.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.501576] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] metadata_listen_port = 8775 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.501741] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] metadata_workers = 2 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.501899] env[62585]: DEBUG oslo_service.service 
[None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] migrate_max_retries = -1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.502077] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] mkisofs_cmd = genisoimage {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.502287] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] my_block_storage_ip = 10.180.1.21 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.502421] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] my_ip = 10.180.1.21 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.502582] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] network_allocate_retries = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.502758] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.502924] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] osapi_compute_listen = 0.0.0.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.503096] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] osapi_compute_listen_port = 8774 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.503267] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] osapi_compute_unique_server_name_scope = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.503435] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] osapi_compute_workers = 2 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.503597] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] password_length = 12 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.503754] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] periodic_enable = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.503911] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] periodic_fuzzy_delay = 60 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.504110] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] pointer_model = usbtablet {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.504297] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] preallocate_images = none {{(pid=62585) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.504460] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] publish_errors = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.504667] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] pybasedir = /opt/stack/nova {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.504836] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ram_allocation_ratio = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.504997] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] rate_limit_burst = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.505208] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] rate_limit_except_level = CRITICAL {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.505374] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] rate_limit_interval = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.505533] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] reboot_timeout = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.505691] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] reclaim_instance_interval = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.505846] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] record = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.506024] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] reimage_timeout_per_gb = 60 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.506246] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] report_interval = 120 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.506432] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] rescue_timeout = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.506595] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] reserved_host_cpus = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.506754] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] reserved_host_disk_mb = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.506914] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf 
None None] reserved_host_memory_mb = 512 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.507088] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] reserved_huge_pages = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.507318] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] resize_confirm_window = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.507501] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] resize_fs_using_block_device = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.507663] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] resume_guests_state_on_host_boot = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.507831] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.507990] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] rpc_response_timeout = 60 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.508162] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] run_external_periodic_tasks = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.508331] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] running_deleted_instance_action = reap {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.508491] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] running_deleted_instance_poll_interval = 1800 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.508646] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] running_deleted_instance_timeout = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.508799] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] scheduler_instance_sync_interval = 120 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.508962] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] service_down_time = 720 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.509139] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] servicegroup_driver = db {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.509354] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] shell_completion = None {{(pid=62585) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.509530] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] shelved_offload_time = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.509689] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] shelved_poll_interval = 3600 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.509853] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] shutdown_timeout = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.510023] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] source_is_ipv6 = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.510189] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ssl_only = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.510436] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.510600] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] sync_power_state_interval = 600 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.510762] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] sync_power_state_pool_size = 1000 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.510930] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] syslog_log_facility = LOG_USER {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.511098] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] tempdir = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.511262] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] timeout_nbd = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.511427] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] transport_url = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.511587] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] update_resources_interval = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.511745] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] use_cow_images = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.511900] env[62585]: DEBUG oslo_service.service [None 
req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] use_eventlog = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.512137] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] use_journal = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.512319] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] use_json = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.512480] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] use_rootwrap_daemon = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.512638] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] use_stderr = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.512796] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] use_syslog = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.512949] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vcpu_pin_set = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.513131] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vif_plugging_is_fatal = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.513302] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vif_plugging_timeout = 300 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.513467] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] virt_mkfs = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.513624] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] volume_usage_poll_interval = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.513781] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] watch_log_file = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.513944] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] web = /usr/share/spice-html5 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 458.514167] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.514354] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.514522] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.514771] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_concurrency.disable_process_locking = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.515401] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.515595] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.515769] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.515945] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.516135] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.516307] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.516495] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.auth_strategy = keystone {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.516664] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.compute_link_prefix = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.516841] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.517024] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.dhcp_domain = novalocal {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.517202] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.enable_instance_password = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.517372] 
env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.glance_link_prefix = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.517540] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.517713] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.517879] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.instance_list_per_project_cells = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.518052] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.list_records_by_skipping_down_cells = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.518222] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.local_metadata_per_cell = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.518391] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.max_limit = 1000 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.518559] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.metadata_cache_expiration = 15 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.518735] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.neutron_default_tenant_id = default {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.518907] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.response_validation = warn {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.519092] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.use_neutron_default_nets = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.519265] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.519430] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.519599] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.519771] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.519943] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.vendordata_dynamic_targets = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.520120] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.vendordata_jsonfile_path = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.520306] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.520501] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.backend = dogpile.cache.memcached {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.520667] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.backend_argument = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.520835] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.config_prefix = cache.oslo {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.521009] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.dead_timeout = 60.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.521185] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.debug_cache_backend = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.521348] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.enable_retry_client = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.521514] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.enable_socket_keepalive = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.521684] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.enabled = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.521850] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.enforce_fips_mode = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.522047] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.expiration_time = 600 {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.522232] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.hashclient_retry_attempts = 2 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.522401] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.hashclient_retry_delay = 1.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.522573] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.memcache_dead_retry = 300 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.522792] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.memcache_password = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.522974] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.523157] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.523329] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.memcache_pool_maxsize = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.523493] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.523659] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.memcache_sasl_enabled = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.523842] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.524016] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.memcache_socket_timeout = 1.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.524209] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.memcache_username = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.524387] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.proxies = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.524554] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.redis_db = 0 {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.524786] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.redis_password = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.524982] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.redis_sentinel_service_name = mymaster {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.525218] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.525408] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.redis_server = localhost:6379 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.525579] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.redis_socket_timeout = 1.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.525742] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.redis_username = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.525905] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.retry_attempts = 2 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.526088] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.retry_delay = 0.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.526258] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.socket_keepalive_count = 1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.526422] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.socket_keepalive_idle = 1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.526582] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.socket_keepalive_interval = 1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.526740] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.tls_allowed_ciphers = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.526897] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.tls_cafile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.527068] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.tls_certfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
458.527234] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.tls_enabled = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.527391] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cache.tls_keyfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.527560] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cinder.auth_section = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.527733] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cinder.auth_type = password {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.527894] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cinder.cafile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.528085] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cinder.catalog_info = volumev3::publicURL {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.528251] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cinder.certfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.528415] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cinder.collect_timing = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.528574] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cinder.cross_az_attach = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.528736] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cinder.debug = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.528897] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cinder.endpoint_template = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.529068] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cinder.http_retries = 3 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.529237] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cinder.insecure = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.529396] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cinder.keyfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.529566] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cinder.os_region_name = RegionOne 
{{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.529731] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cinder.split_loggers = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.529889] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cinder.timeout = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.530070] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.530238] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] compute.cpu_dedicated_set = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.530397] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] compute.cpu_shared_set = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.530564] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] compute.image_type_exclude_list = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.530728] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.530890] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] compute.max_concurrent_disk_ops = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.531063] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] compute.max_disk_devices_to_attach = -1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.531230] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.531399] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.531560] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] compute.resource_provider_association_refresh = 300 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.531717] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.531882] env[62585]: DEBUG oslo_service.service [None 
req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] compute.shutdown_retry_interval = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.532072] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.532254] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] conductor.workers = 2 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.532430] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] console.allowed_origins = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.532591] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] console.ssl_ciphers = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.532761] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] console.ssl_minimum_version = default {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.532929] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] consoleauth.enforce_session_timeout = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.533108] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] consoleauth.token_ttl = 600 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.533285] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.cafile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.533442] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.certfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.533605] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.collect_timing = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.533763] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.connect_retries = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.533920] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.connect_retry_delay = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.534116] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.endpoint_override = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.534295] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] 
cyborg.insecure = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.534454] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.keyfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.534614] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.max_version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.534829] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.min_version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.535033] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.region_name = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.535240] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.retriable_status_codes = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.535417] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.service_name = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.535593] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.service_type = accelerator {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.535758] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.split_loggers = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.535917] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.status_code_retries = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.536092] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.status_code_retry_delay = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.536258] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.timeout = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.536439] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.536602] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] cyborg.version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.536785] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.backend = sqlalchemy {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.536956] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.connection = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.537139] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.connection_debug = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.537311] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.connection_parameters = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.537476] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.connection_recycle_time = 3600 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.537639] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.connection_trace = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.537800] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.db_inc_retry_interval = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.537963] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.db_max_retries = 20 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.538138] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.db_max_retry_interval = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.538302] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.db_retry_interval = 1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.538464] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.max_overflow = 50 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.538624] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.max_pool_size = 5 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.538784] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.max_retries = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.538955] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.539127] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.mysql_wsrep_sync_wait = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
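
(Editor's note, a sketch inserted between log entries.) The DEBUG lines above and below are oslo.config dumping every registered option at nova-compute start-up: each entry is written by ConfigOpts.log_opt_values() (the {{... log_opt_values .../oslo_config/cfg.py:2826}} frame cited on every line), and options registered with secret=True, such as database.connection, cache.memcache_password or vault.root_token_id, print as ****. The following is a minimal, self-contained sketch of that mechanism only; the group names and defaults are borrowed from the dump purely as illustrations, and the real option definitions live in nova and the oslo libraries it imports.

import logging

from oslo_config import cfg

LOG = logging.getLogger(__name__)
CONF = cfg.CONF

# Illustrative stand-ins for two of the groups seen in the dump above.
cache_group = cfg.OptGroup(name='cache', title='Caching options (sketch)')
cache_opts = [
    cfg.BoolOpt('enabled', default=False),
    cfg.ListOpt('memcache_servers', default=['localhost:11211']),
]

database_group = cfg.OptGroup(name='database', title='Database options (sketch)')
database_opts = [
    # secret=True is what makes a value print as '****' in the dump.
    cfg.StrOpt('connection', secret=True),
    cfg.IntOpt('max_pool_size', default=5),
]

CONF.register_group(cache_group)
CONF.register_opts(cache_opts, group=cache_group)
CONF.register_group(database_group)
CONF.register_opts(database_opts, group=database_group)

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    # Parse an (empty) command line; a real service would also read nova.conf.
    CONF(args=[], project='nova-sketch')
    # Hypothetical value, only to show the '****' masking of a secret option.
    CONF.set_override('connection', 'mysql+pymysql://nova:secret@db/nova',
                      group='database')
    # This is the call every line of the dump attributes to oslo_config/cfg.py:
    # it walks each registered option and logs "<group>.<name> = <value>"
    # at the requested level, masking secret options.
    CONF.log_opt_values(LOG, logging.DEBUG)

Running the sketch prints option lines of the same shape as the dump (e.g. "database.connection = ****", "cache.memcache_servers = ['localhost:11211']"); nova-compute emits one such DEBUG entry per registered option, which is why a single start-up produces this long run of lines. The surrounding timestamp, req-id and pid context comes from oslo.log's formatter, not from log_opt_values itself.
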
[ 458.539289] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.pool_timeout = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.539450] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.retry_interval = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.539610] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.slave_connection = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.539768] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.sqlite_synchronous = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.539928] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] database.use_db_reconnect = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.540119] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.backend = sqlalchemy {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.540293] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.connection = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.540454] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.connection_debug = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.540624] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.connection_parameters = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.540785] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.connection_recycle_time = 3600 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.540946] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.connection_trace = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.541120] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.db_inc_retry_interval = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.541285] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.db_max_retries = 20 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.541448] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.db_max_retry_interval = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.541608] env[62585]: DEBUG 
oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.db_retry_interval = 1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.541769] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.max_overflow = 50 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.541929] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.max_pool_size = 5 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.542104] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.max_retries = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.542279] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.542440] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.542598] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.pool_timeout = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.542759] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.retry_interval = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.542917] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.slave_connection = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.543088] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] api_database.sqlite_synchronous = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.543268] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] devices.enabled_mdev_types = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.543444] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.543614] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ephemeral_storage_encryption.default_format = luks {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.543775] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ephemeral_storage_encryption.enabled = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.543939] env[62585]: 
DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.544160] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.api_servers = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.544332] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.cafile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.544497] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.certfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.544659] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.collect_timing = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.544816] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.connect_retries = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.545077] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.connect_retry_delay = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.545278] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.debug = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.545454] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.default_trusted_certificate_ids = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.545618] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.enable_certificate_validation = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.545781] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.enable_rbd_download = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.545940] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.endpoint_override = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.546122] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.insecure = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.546291] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.keyfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.546449] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] 
glance.max_version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.546606] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.min_version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.546766] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.num_retries = 3 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.546935] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.rbd_ceph_conf = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.547130] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.rbd_connect_timeout = 5 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.547333] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.rbd_pool = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.547505] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.rbd_user = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.547669] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.region_name = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.547831] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.retriable_status_codes = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.547990] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.service_name = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.548175] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.service_type = image {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.548342] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.split_loggers = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.548508] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.status_code_retries = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.548667] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.status_code_retry_delay = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.548826] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.timeout = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.549015] 
env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.549188] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.verify_glance_signatures = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.549352] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] glance.version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.549518] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] guestfs.debug = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.549682] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] mks.enabled = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.550042] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.550239] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] image_cache.manager_interval = 2400 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.550410] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] image_cache.precache_concurrency = 1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.550581] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] image_cache.remove_unused_base_images = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.550750] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.550920] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.551112] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] image_cache.subdirectory_name = _base {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.551292] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.api_max_retries = 60 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.551507] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.api_retry_interval = 2 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
458.551701] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.auth_section = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.551872] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.auth_type = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.552049] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.cafile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.552221] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.certfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.552388] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.collect_timing = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.552552] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.conductor_group = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.552712] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.connect_retries = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.552872] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.connect_retry_delay = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.553045] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.endpoint_override = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.553219] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.insecure = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.553382] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.keyfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.553544] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.max_version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.553703] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.min_version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.553871] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.peer_list = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.554043] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.region_name = None {{(pid=62585) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.554232] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.retriable_status_codes = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.554403] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.serial_console_state_timeout = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.554566] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.service_name = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.554738] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.service_type = baremetal {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.554901] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.shard = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.555113] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.split_loggers = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.555295] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.status_code_retries = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.555461] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.status_code_retry_delay = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.555621] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.timeout = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.555803] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.555965] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ironic.version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.556164] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.556341] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] key_manager.fixed_key = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.556528] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.556693] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.barbican_api_version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.556855] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.barbican_endpoint = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.557037] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.barbican_endpoint_type = public {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.557202] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.barbican_region_name = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.557365] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.cafile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.557525] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.certfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.557688] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.collect_timing = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.557847] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.insecure = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.558020] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.keyfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.558184] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.number_of_retries = 60 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.558347] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.retry_delay = 1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.558508] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.send_service_user_token = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.558671] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.split_loggers = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.558829] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.timeout = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.558990] 
env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.verify_ssl = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.559162] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican.verify_ssl_path = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.559331] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican_service_user.auth_section = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.559494] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican_service_user.auth_type = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.559655] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican_service_user.cafile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.559814] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican_service_user.certfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.559976] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican_service_user.collect_timing = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.560151] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican_service_user.insecure = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.560315] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican_service_user.keyfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.560478] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican_service_user.split_loggers = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.560635] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] barbican_service_user.timeout = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.560803] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vault.approle_role_id = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.560962] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vault.approle_secret_id = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.561170] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vault.kv_mountpoint = secret {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.561343] env[62585]: DEBUG 
oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vault.kv_path = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.561509] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vault.kv_version = 2 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.561681] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vault.namespace = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.561848] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vault.root_token_id = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.562025] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vault.ssl_ca_crt_file = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.562190] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vault.timeout = 60.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.562357] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vault.use_ssl = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.562527] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.562697] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.auth_section = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.562860] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.auth_type = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.563034] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.cafile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.563203] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.certfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.563370] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.collect_timing = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.563529] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.connect_retries = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.563689] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.connect_retry_delay = None {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.563848] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.endpoint_override = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.564019] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.insecure = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.564210] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.keyfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.564375] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.max_version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.564533] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.min_version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.564692] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.region_name = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.564851] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.retriable_status_codes = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.565016] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.service_name = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.565232] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.service_type = identity {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.565405] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.split_loggers = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.565569] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.status_code_retries = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.565729] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.status_code_retry_delay = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.565890] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.timeout = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.566085] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
458.566257] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] keystone.version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.566462] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.connection_uri = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.566625] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.cpu_mode = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.566794] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.cpu_model_extra_flags = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.566965] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.cpu_models = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.567157] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.cpu_power_governor_high = performance {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.567327] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.cpu_power_governor_low = powersave {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.567492] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.cpu_power_management = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.567664] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.567828] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.device_detach_attempts = 8 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.567988] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.device_detach_timeout = 20 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.568168] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.disk_cachemodes = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.568329] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.disk_prefix = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.568494] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.enabled_perf_events = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.568658] env[62585]: DEBUG oslo_service.service [None 
req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.file_backed_memory = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.568825] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.gid_maps = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.568986] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.hw_disk_discard = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.569159] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.hw_machine_type = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.569368] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.images_rbd_ceph_conf = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.569579] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.569753] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.569926] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.images_rbd_glance_store_name = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.570113] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.images_rbd_pool = rbd {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.570292] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.images_type = default {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.570456] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.images_volume_group = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.570617] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.inject_key = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.570781] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.inject_partition = -2 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.570942] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.inject_password = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.571120] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] 
libvirt.iscsi_iface = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.571289] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.iser_use_multipath = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.571455] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.live_migration_bandwidth = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.571620] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.571782] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.live_migration_downtime = 500 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.571945] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.572117] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.572281] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.live_migration_inbound_addr = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.572445] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.572613] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.live_migration_permit_post_copy = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.572776] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.live_migration_scheme = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.572957] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.live_migration_timeout_action = abort {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.573136] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.live_migration_tunnelled = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.573300] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.live_migration_uri = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.573462] env[62585]: DEBUG oslo_service.service [None 
req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.live_migration_with_native_tls = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.573621] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.max_queues = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.573783] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.574027] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.574224] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.nfs_mount_options = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.574524] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.574698] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.574863] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.num_iser_scan_tries = 5 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.575046] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.num_memory_encrypted_guests = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.575256] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.575428] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.num_pcie_ports = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.575597] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.num_volume_scan_tries = 5 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.575765] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.pmem_namespaces = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.575929] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.quobyte_client_cfg = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.576240] env[62585]: DEBUG oslo_service.service [None 
req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.576417] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.rbd_connect_timeout = 5 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.576584] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.576750] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.576914] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.rbd_secret_uuid = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.577086] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.rbd_user = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.577258] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.577433] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.remote_filesystem_transport = ssh {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.577595] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.rescue_image_id = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.577755] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.rescue_kernel_id = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.577914] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.rescue_ramdisk_id = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.578098] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.578265] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.rx_queue_size = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.578439] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.smbfs_mount_options = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.578712] env[62585]: DEBUG oslo_service.service [None 
req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.578885] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.snapshot_compression = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.579060] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.snapshot_image_format = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.579288] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.579457] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.sparse_logical_volumes = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.579624] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.swtpm_enabled = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.579797] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.swtpm_group = tss {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.579968] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.swtpm_user = tss {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.580155] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.sysinfo_serial = unique {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.580330] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.tb_cache_size = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.580500] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.tx_queue_size = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.580672] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.uid_maps = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.580837] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.use_virtio_for_bridges = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.581018] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.virt_type = kvm {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.581193] env[62585]: DEBUG oslo_service.service [None 
req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.volume_clear = zero {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.581361] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.volume_clear_size = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.581527] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.volume_use_multipath = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.581690] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.vzstorage_cache_path = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.581860] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.582039] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.vzstorage_mount_group = qemu {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.582214] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.vzstorage_mount_opts = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.582387] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.582662] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.582838] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.vzstorage_mount_user = stack {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.583013] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.583201] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.auth_section = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.583379] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.auth_type = password {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.583543] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.cafile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.583704] env[62585]: DEBUG oslo_service.service 
[None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.certfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.583902] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.collect_timing = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.584125] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.connect_retries = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.584305] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.connect_retry_delay = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.584530] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.default_floating_pool = public {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.584711] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.endpoint_override = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.584880] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.extension_sync_interval = 600 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.585070] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.http_retries = 3 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.585293] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.insecure = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.585425] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.keyfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.585587] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.max_version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.585759] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.585922] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.min_version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.586105] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.ovs_bridge = br-int {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.586277] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.physnets = [] {{(pid=62585) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.586450] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.region_name = RegionOne {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.586613] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.retriable_status_codes = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.586782] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.service_metadata_proxy = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.586943] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.service_name = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.587126] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.service_type = network {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.587294] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.split_loggers = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.587454] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.status_code_retries = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.587613] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.status_code_retry_delay = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.587769] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.timeout = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.587949] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.588123] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] neutron.version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.588296] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] notifications.bdms_in_notifications = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.588476] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] notifications.default_level = INFO {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.588650] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] notifications.notification_format = unversioned {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.588813] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] notifications.notify_on_state_change = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.588987] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.589175] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] pci.alias = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.589349] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] pci.device_spec = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.589513] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] pci.report_in_placement = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.589683] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.auth_section = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.589856] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.auth_type = password {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.590033] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.590199] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.cafile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.590360] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.certfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.590524] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.collect_timing = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.590680] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.connect_retries = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.590837] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.connect_retry_delay = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.590994] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.default_domain_id = None {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.591175] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.default_domain_name = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.591342] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.domain_id = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.591501] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.domain_name = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.591660] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.endpoint_override = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.591820] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.insecure = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.591979] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.keyfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.592148] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.max_version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.592307] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.min_version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.592474] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.password = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.592631] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.project_domain_id = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.592797] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.project_domain_name = Default {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.592964] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.project_id = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.593149] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.project_name = service {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.593320] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.region_name = RegionOne {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.593482] 
env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.retriable_status_codes = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.593640] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.service_name = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.593808] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.service_type = placement {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.593968] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.split_loggers = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.594166] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.status_code_retries = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.594329] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.status_code_retry_delay = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.594490] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.system_scope = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.594646] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.timeout = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.594803] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.trust_id = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.594960] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.user_domain_id = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.595186] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.user_domain_name = Default {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.595389] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.user_id = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.595526] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.username = nova {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.595711] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.595870] env[62585]: DEBUG oslo_service.service [None 
req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] placement.version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.596057] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] quota.cores = 20 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.596226] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] quota.count_usage_from_placement = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.596397] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.596570] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] quota.injected_file_content_bytes = 10240 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.596734] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] quota.injected_file_path_length = 255 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.596897] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] quota.injected_files = 5 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.597072] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] quota.instances = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.597242] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] quota.key_pairs = 100 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.597405] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] quota.metadata_items = 128 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.597568] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] quota.ram = 51200 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.597729] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] quota.recheck_quota = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.597895] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] quota.server_group_members = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.598074] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] quota.server_groups = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.598248] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.598444] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.598607] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] scheduler.image_metadata_prefilter = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.598782] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.598949] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] scheduler.max_attempts = 3 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.599125] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] scheduler.max_placement_results = 1000 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.599288] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.599449] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] scheduler.query_placement_for_image_type_support = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.599609] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.599779] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] scheduler.workers = 2 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.599951] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.600135] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.600317] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.600487] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.600652] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.600815] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.600986] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.601190] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.601404] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.host_subset_size = 1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.601602] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.601768] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.601933] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.602117] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.isolated_hosts = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.602287] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.isolated_images = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.602506] env[62585]: DEBUG oslo_service.service [None 
req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.602677] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.602864] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.603041] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.pci_in_placement = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.603214] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.603378] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.603541] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.603703] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.603868] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.604044] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.604237] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.track_instance_changes = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.604422] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.604596] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] metrics.required = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.604762] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] metrics.weight_multiplier = 1.0 
{{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.604925] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.605126] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] metrics.weight_setting = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.605486] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.605619] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] serial_console.enabled = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.605826] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] serial_console.port_range = 10000:20000 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.605998] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.606196] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.606367] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] serial_console.serialproxy_port = 6083 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.606533] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] service_user.auth_section = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.606704] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] service_user.auth_type = password {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.606862] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] service_user.cafile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.607029] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] service_user.certfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.607196] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] service_user.collect_timing = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.607357] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] service_user.insecure = False {{(pid=62585) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.607513] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] service_user.keyfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.607678] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] service_user.send_service_user_token = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.607838] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] service_user.split_loggers = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.608026] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] service_user.timeout = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.608190] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] spice.agent_enabled = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.608375] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] spice.enabled = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.608705] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.608898] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.609086] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] spice.html5proxy_port = 6082 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.609253] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] spice.image_compression = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.609413] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] spice.jpeg_compression = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.609570] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] spice.playback_compression = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.609730] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] spice.require_secure = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.609898] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] spice.server_listen = 127.0.0.1 {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.610075] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.610240] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] spice.streaming_mode = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.610398] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] spice.zlib_compression = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.610563] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] upgrade_levels.baseapi = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.610732] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] upgrade_levels.compute = auto {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.610892] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] upgrade_levels.conductor = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.611063] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] upgrade_levels.scheduler = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.611234] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vendordata_dynamic_auth.auth_section = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.611397] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vendordata_dynamic_auth.auth_type = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.611557] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vendordata_dynamic_auth.cafile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.611715] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vendordata_dynamic_auth.certfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.611875] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.612046] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vendordata_dynamic_auth.insecure = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.612210] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vendordata_dynamic_auth.keyfile = None {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.612373] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.612527] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vendordata_dynamic_auth.timeout = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.612699] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.api_retry_count = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.612859] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.ca_file = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.613037] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.cache_prefix = devstack-image-cache {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.613210] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.cluster_name = testcl1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.613377] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.connection_pool_size = 10 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.613535] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.console_delay_seconds = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.613701] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.datastore_regex = ^datastore.* {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.613904] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.614112] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.host_password = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.614297] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.host_port = 443 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.614470] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.host_username = administrator@vsphere.local {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.614641] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.insecure = True {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.614805] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.integration_bridge = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.614970] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.maximum_objects = 100 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.615186] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.pbm_default_policy = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.615364] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.pbm_enabled = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.615533] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.pbm_wsdl_location = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.615697] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.615855] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.serial_port_proxy_uri = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.616022] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.serial_port_service_uri = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.616189] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.task_poll_interval = 0.5 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.616362] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.use_linked_clone = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.616529] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.vnc_keymap = en-us {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.616693] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.vnc_port = 5900 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.616856] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vmware.vnc_port_total = 10000 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.617051] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vnc.auth_schemes = ['none'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.617291] 
env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vnc.enabled = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.617603] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.617792] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.617964] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vnc.novncproxy_port = 6080 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.618154] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vnc.server_listen = 127.0.0.1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.618332] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.618496] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vnc.vencrypt_ca_certs = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.618657] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vnc.vencrypt_client_cert = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.618815] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vnc.vencrypt_client_key = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.618996] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.619176] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.disable_deep_image_inspection = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.619340] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.619502] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.619661] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.619820] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.disable_rootwrap = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.619979] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.enable_numa_live_migration = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.620155] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.620321] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.620482] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.620642] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.libvirt_disable_apic = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.620801] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.620962] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.621136] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.621301] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.621461] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.621621] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.621781] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.621943] 
env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.622118] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.622287] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.622471] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.622641] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] wsgi.client_socket_timeout = 900 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.622806] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] wsgi.default_pool_size = 1000 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.622975] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] wsgi.keep_alive = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.623156] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] wsgi.max_header_line = 16384 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.623322] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] wsgi.secure_proxy_ssl_header = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.623482] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] wsgi.ssl_ca_file = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.623641] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] wsgi.ssl_cert_file = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.623798] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] wsgi.ssl_key_file = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.623960] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] wsgi.tcp_keepidle = 600 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.624173] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.624350] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] zvm.ca_file = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.624513] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] zvm.cloud_connector_url = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.624800] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.624971] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] zvm.reachable_timeout = 300 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.625238] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_policy.enforce_new_defaults = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.625639] env[62585]: WARNING oslo_config.cfg [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
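The DEBUG "group.option = value" lines above (and below) are produced by oslo.config when the service dumps its effective configuration at startup; each record points at log_opt_values in oslo_config/cfg.py:2826, and the WARNING just above is oslo.config flagging a deprecated option that was set. A minimal sketch of how such a dump is produced with oslo.config follows, assuming a hypothetical standalone script rather than Nova's actual startup path; the registered options are illustrative only.

    import logging

    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF = cfg.CONF

    # Register a couple of [oslo_policy] options similar to the ones logged
    # above; the real Nova services register many more groups and options.
    CONF.register_opts(
        [
            cfg.BoolOpt('enforce_new_defaults', default=False),
            cfg.BoolOpt('enforce_scope', default=False,
                        deprecated_for_removal=True,
                        deprecated_reason='Scope checks will always be enforced.'),
        ],
        group='oslo_policy',
    )

    # Parse CLI arguments / config files (empty here), then log every
    # registered option value at DEBUG level, which emits
    # "<group>.<option> = <value>" lines like those in this log.
    CONF(args=[], project='nova')
    CONF.log_opt_values(LOG, logging.DEBUG)

In the log above the same dump simply covers every group Nova registers (scheduler, filter_scheduler, vmware, vnc, oslo_messaging_rabbit, and so on), with secret values such as vmware.host_password and transport_url masked as "****".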
[ 458.625827] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_policy.enforce_scope = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.626018] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_policy.policy_default_rule = default {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.626200] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.626378] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_policy.policy_file = policy.yaml {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.626548] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.626709] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.626869] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.627037] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.627203] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.627372] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.627546] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.627720] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] profiler.connection_string = messaging:// {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.627887] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] profiler.enabled = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.628067] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] profiler.es_doc_type = notification 
{{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.628234] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] profiler.es_scroll_size = 10000 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.628400] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] profiler.es_scroll_time = 2m {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.628562] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] profiler.filter_error_trace = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.628728] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] profiler.hmac_keys = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.628895] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] profiler.sentinel_service_name = mymaster {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.629070] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] profiler.socket_timeout = 0.1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.629237] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] profiler.trace_requests = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.629395] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] profiler.trace_sqlalchemy = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.629572] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] profiler_jaeger.process_tags = {} {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.629730] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] profiler_jaeger.service_name_prefix = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.629890] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] profiler_otlp.service_name_prefix = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.630066] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] remote_debug.host = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.630233] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] remote_debug.port = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.630411] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.630573] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.630731] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.630889] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.631059] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.631225] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.631381] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.631539] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.631698] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.631864] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.632029] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.632205] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.632371] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.632538] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.632705] 
env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.632869] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.633038] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.633213] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.633376] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.633535] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.633699] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.633860] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.634029] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.634220] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.634385] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.634546] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.634709] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.634869] env[62585]: DEBUG oslo_service.service [None 
req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.635057] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.635241] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.ssl = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.635415] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.635583] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.635743] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.635913] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.636092] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.ssl_version = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.636258] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.636443] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.636606] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_notifications.retry = -1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.636789] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.636967] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_messaging_notifications.transport_url = **** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.637153] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.auth_section = None {{(pid=62585) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.637319] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.auth_type = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.637477] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.cafile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.637636] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.certfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.637796] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.collect_timing = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.637953] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.connect_retries = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.638128] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.connect_retry_delay = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.638289] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.endpoint_id = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.638448] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.endpoint_override = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.638606] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.insecure = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.638761] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.keyfile = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.638916] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.max_version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.639084] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.min_version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.639300] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.region_name = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.639487] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.retriable_status_codes = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.639650] 
env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.service_name = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.639812] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.service_type = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.639975] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.split_loggers = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.640147] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.status_code_retries = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.640310] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.status_code_retry_delay = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.640467] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.timeout = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.640626] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.valid_interfaces = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.640784] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_limit.version = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.640950] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_reports.file_event_handler = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.641126] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.641290] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] oslo_reports.log_dir = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.641462] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.641623] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.641783] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.641949] 
env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.642126] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.642289] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.642458] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.642616] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vif_plug_ovs_privileged.group = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.642773] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.642937] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.643110] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.643275] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] vif_plug_ovs_privileged.user = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.643445] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_vif_linux_bridge.flat_interface = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.643622] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.643794] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.643967] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.644180] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.644358] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.644526] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.644690] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.644869] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.645066] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_vif_ovs.isolate_vif = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.645271] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.645439] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.645610] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.645785] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_vif_ovs.ovsdb_interface = native {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.645950] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] os_vif_ovs.per_port_bridge = False {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.646137] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] privsep_osbrick.capabilities = [21] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.646303] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] privsep_osbrick.group = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.646462] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] privsep_osbrick.helper_command = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.646626] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.646788] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.646946] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] privsep_osbrick.user = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.647133] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.647300] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] nova_sys_admin.group = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.647459] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] nova_sys_admin.helper_command = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.647625] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.647788] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.647947] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] nova_sys_admin.user = None {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 458.648088] env[62585]: DEBUG oslo_service.service [None req-746ab92e-07a6-40fa-a383-eeba2f48b3bf None None] ******************************************************************************** {{(pid=62585) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 458.648576] env[62585]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 459.151616] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Getting list of instances from cluster (obj){ [ 459.151616] env[62585]: value = "domain-c8" [ 459.151616] env[62585]: _type = "ClusterComputeResource" [ 459.151616] env[62585]: } {{(pid=62585) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 459.152804] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e3875c-e628-4c9b-ab07-f2f5a14abafa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.161816] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Got total of 0 instances {{(pid=62585) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 459.162372] env[62585]: WARNING nova.virt.vmwareapi.driver [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 459.162831] env[62585]: INFO nova.virt.node [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Generated node identity 66db9ec1-b5c3-45d2-a885-8e338110656b [ 459.163077] env[62585]: INFO nova.virt.node [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Wrote node identity 66db9ec1-b5c3-45d2-a885-8e338110656b to /opt/stack/data/n-cpu-1/compute_id [ 459.665654] env[62585]: WARNING nova.compute.manager [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Compute nodes ['66db9ec1-b5c3-45d2-a885-8e338110656b'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 460.670886] env[62585]: INFO nova.compute.manager [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 461.677056] env[62585]: WARNING nova.compute.manager [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 461.677405] env[62585]: DEBUG oslo_concurrency.lockutils [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 461.677503] env[62585]: DEBUG oslo_concurrency.lockutils [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 461.677689] env[62585]: DEBUG oslo_concurrency.lockutils [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 461.677853] env[62585]: DEBUG nova.compute.resource_tracker [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62585) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 461.678786] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d946444-0071-49c5-a184-cac0d1b31ca9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 461.686902] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20183bac-8de6-4cc6-ae16-72c7d99331c7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 461.700413] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a6d5851d-8d76-4d6f-a95a-4c0d7ca3f2c4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 461.706308] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04043fb3-04fa-4f0b-9994-1bd9069fdd8b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 461.734535] env[62585]: DEBUG nova.compute.resource_tracker [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181320MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=62585) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 461.734693] env[62585]: DEBUG oslo_concurrency.lockutils [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 461.734853] env[62585]: DEBUG oslo_concurrency.lockutils [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 462.237500] env[62585]: WARNING nova.compute.resource_tracker [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] No compute node record for cpu-1:66db9ec1-b5c3-45d2-a885-8e338110656b: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 66db9ec1-b5c3-45d2-a885-8e338110656b could not be found. [ 462.741269] env[62585]: INFO nova.compute.resource_tracker [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 66db9ec1-b5c3-45d2-a885-8e338110656b [ 464.248771] env[62585]: DEBUG nova.compute.resource_tracker [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 464.249163] env[62585]: DEBUG nova.compute.resource_tracker [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 464.400140] env[62585]: INFO nova.scheduler.client.report [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] [req-96669839-cdfc-488a-9038-66e197301853] Created resource provider record via placement API for resource provider with UUID 66db9ec1-b5c3-45d2-a885-8e338110656b and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 464.417370] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e65551-b693-43af-af67-1792fe7c1635 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 464.425400] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c773624f-e625-49a3-a499-8389b852b17a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 464.454598] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73e76c3-89e6-44d2-be99-b540163dbd3e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 464.461697] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95da6add-7e40-43e3-82a8-5eeca0593e8e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 464.474263] env[62585]: DEBUG nova.compute.provider_tree [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Updating inventory in ProviderTree for provider 66db9ec1-b5c3-45d2-a885-8e338110656b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 465.015081] env[62585]: DEBUG nova.scheduler.client.report [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Updated inventory for provider 66db9ec1-b5c3-45d2-a885-8e338110656b with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 465.015081] env[62585]: DEBUG nova.compute.provider_tree [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Updating resource provider 66db9ec1-b5c3-45d2-a885-8e338110656b generation from 0 to 1 during operation: update_inventory {{(pid=62585) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 465.015081] env[62585]: DEBUG nova.compute.provider_tree [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Updating inventory in ProviderTree for provider 66db9ec1-b5c3-45d2-a885-8e338110656b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 465.068834] env[62585]: DEBUG nova.compute.provider_tree [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Updating 
resource provider 66db9ec1-b5c3-45d2-a885-8e338110656b generation from 1 to 2 during operation: update_traits {{(pid=62585) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 465.574022] env[62585]: DEBUG nova.compute.resource_tracker [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62585) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 465.574022] env[62585]: DEBUG oslo_concurrency.lockutils [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.837s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 465.574022] env[62585]: DEBUG nova.service [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Creating RPC server for service compute {{(pid=62585) start /opt/stack/nova/nova/service.py:186}} [ 465.591038] env[62585]: DEBUG nova.service [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] Join ServiceGroup membership for this service compute {{(pid=62585) start /opt/stack/nova/nova/service.py:203}} [ 465.591502] env[62585]: DEBUG nova.servicegroup.drivers.db [None req-25d10c61-5b2a-4037-bc72-25fcd87111b3 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62585) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 504.205701] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Acquiring lock "d8d432cc-07e0-4aac-9ad0-88a621173835" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 504.206025] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Lock "d8d432cc-07e0-4aac-9ad0-88a621173835" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 504.711378] env[62585]: DEBUG nova.compute.manager [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 505.251533] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 505.251802] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 505.253660] env[62585]: INFO nova.compute.claims [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 505.482312] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Acquiring lock "bc1b883b-32e4-45a8-b785-0eb53bbd7ae9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 505.482747] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Lock "bc1b883b-32e4-45a8-b785-0eb53bbd7ae9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 505.986749] env[62585]: DEBUG nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 506.334202] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3358f5-f9b3-429d-8a19-c92a121bed2b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.344691] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e9fdaf-85eb-488e-a8d1-041db5dfb0fc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.376943] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4e284c-60a5-438f-a8f1-74c05ff14c2f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.384221] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e3505b-b560-43b1-b0fb-f3c14f2e7240 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.397748] env[62585]: DEBUG nova.compute.provider_tree [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 506.411074] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Acquiring lock "1a7356c7-1442-4de3-8a1f-04fc1bfb03b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 506.411074] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Lock "1a7356c7-1442-4de3-8a1f-04fc1bfb03b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 506.511848] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 506.901529] env[62585]: DEBUG nova.scheduler.client.report [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 506.913733] env[62585]: DEBUG nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 507.075685] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Acquiring lock "40fd1fff-1df0-43b6-9cce-a666ecd63199" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 507.075685] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Lock "40fd1fff-1df0-43b6-9cce-a666ecd63199" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 507.408741] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.154s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 507.408741] env[62585]: DEBUG nova.compute.manager [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 507.409849] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.898s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 507.411514] env[62585]: INFO nova.compute.claims [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 507.452520] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 507.578949] env[62585]: DEBUG nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 507.624585] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Acquiring lock "779efd7e-99d5-4065-8ade-1665533677a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 507.625213] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Lock "779efd7e-99d5-4065-8ade-1665533677a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 507.917388] env[62585]: DEBUG nova.compute.utils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 507.919907] env[62585]: DEBUG nova.compute.manager [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 507.919907] env[62585]: DEBUG nova.network.neutron [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 508.107535] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 508.131274] env[62585]: DEBUG nova.compute.manager [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 508.404847] env[62585]: DEBUG oslo_concurrency.lockutils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Acquiring lock "916af5db-2e20-4156-9048-148f0f6253cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 508.404847] env[62585]: DEBUG oslo_concurrency.lockutils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Lock "916af5db-2e20-4156-9048-148f0f6253cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 508.421093] env[62585]: DEBUG nova.policy [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6e238fbd13844778d829bbbd5564560', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7767e044b60d4e0c8b04051967ec97d4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 508.424783] env[62585]: DEBUG nova.compute.manager [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 508.557324] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047f8b47-59a6-4fb2-aa06-0b96dfadd6f6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.569743] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49988d91-9171-497e-ae7f-5f7d68af94c2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.603030] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-debd7137-26ed-439f-abf4-6c76c57f6f4b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.610617] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796c673b-029c-4092-b0e3-da5e41aac13d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.624438] env[62585]: DEBUG nova.compute.provider_tree [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 508.663966] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 508.908011] env[62585]: DEBUG nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 509.129508] env[62585]: DEBUG nova.scheduler.client.report [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 509.260982] env[62585]: DEBUG nova.network.neutron [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Successfully created port: dfee4c92-ac3f-4c83-b53e-451425f039b1 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 509.440080] env[62585]: DEBUG nova.compute.manager [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 509.446589] env[62585]: DEBUG oslo_concurrency.lockutils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 509.486632] env[62585]: DEBUG nova.virt.hardware [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 509.487107] env[62585]: DEBUG nova.virt.hardware [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 509.487785] env[62585]: DEBUG nova.virt.hardware [None 
req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 509.491019] env[62585]: DEBUG nova.virt.hardware [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 509.491019] env[62585]: DEBUG nova.virt.hardware [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 509.491019] env[62585]: DEBUG nova.virt.hardware [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 509.491019] env[62585]: DEBUG nova.virt.hardware [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 509.491019] env[62585]: DEBUG nova.virt.hardware [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 509.491357] env[62585]: DEBUG nova.virt.hardware [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 509.491357] env[62585]: DEBUG nova.virt.hardware [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 509.491357] env[62585]: DEBUG nova.virt.hardware [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 509.491357] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d00b5e-b724-4b20-a4a6-44d19d693108 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.503439] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58f5695-809f-4a70-a76a-1563935faa10 {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.533660] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2ef92c-d27a-4819-98ef-d2ff739d1064 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.638053] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.228s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 509.638666] env[62585]: DEBUG nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 509.641329] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.189s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 509.642646] env[62585]: INFO nova.compute.claims [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 510.151562] env[62585]: DEBUG nova.compute.utils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 510.155922] env[62585]: DEBUG nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 510.156373] env[62585]: DEBUG nova.network.neutron [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 510.411101] env[62585]: DEBUG nova.policy [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3f321bfa8814814808fe79c9c887d76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '203c5fd5d3f94a23b8bbd9aa4f1580e5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 510.656760] env[62585]: DEBUG nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 510.793665] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51261e37-0b7a-4c1d-8e1a-f54cbd266530 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.802300] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aca1d29-4f8e-47d3-ae59-3def5e377122 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.838713] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8bc97a7-c4f2-4177-9d65-aa0c0df9929a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.847370] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851e0d41-ae0b-474b-8cdf-ccb47f8250bb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.861937] env[62585]: DEBUG nova.compute.provider_tree [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 511.365543] env[62585]: DEBUG nova.scheduler.client.report [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 511.384556] env[62585]: DEBUG nova.network.neutron [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Successfully created port: d73950ff-ebe1-4fc5-8f22-da4a90362254 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 511.669105] env[62585]: DEBUG nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 511.701491] env[62585]: DEBUG nova.virt.hardware [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 511.701881] env[62585]: DEBUG nova.virt.hardware [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 511.701881] env[62585]: DEBUG nova.virt.hardware [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 511.702290] env[62585]: DEBUG nova.virt.hardware [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 511.702565] env[62585]: DEBUG nova.virt.hardware [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 511.702625] env[62585]: DEBUG nova.virt.hardware [None req-f9fc8305-1151-4253-b247-66ba959d55cf 
tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 511.702789] env[62585]: DEBUG nova.virt.hardware [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 511.702939] env[62585]: DEBUG nova.virt.hardware [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 511.703112] env[62585]: DEBUG nova.virt.hardware [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 511.703289] env[62585]: DEBUG nova.virt.hardware [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 511.703432] env[62585]: DEBUG nova.virt.hardware [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 511.704371] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1121ce-84d8-4092-ace9-05363404e252 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.712937] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9ae087-dc2a-43da-b0a3-0f7025fb3569 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.873221] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.230s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 511.873221] env[62585]: DEBUG nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 511.874768] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.767s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 511.876918] env[62585]: INFO nova.compute.claims [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 512.211702] env[62585]: DEBUG oslo_concurrency.lockutils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Acquiring lock "f8ac8468-a804-4d0f-a0e8-864eb7064074" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 512.211702] env[62585]: DEBUG oslo_concurrency.lockutils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Lock "f8ac8468-a804-4d0f-a0e8-864eb7064074" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 512.381300] env[62585]: DEBUG nova.compute.utils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 512.382912] env[62585]: DEBUG nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 512.383078] env[62585]: DEBUG nova.network.neutron [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 512.473226] env[62585]: DEBUG nova.policy [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '45b199fa05014692a28dbeb376fb658e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9fd1bbf198a445b803fa6d30251b0bc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 512.593950] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 512.713928] env[62585]: DEBUG nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 512.890395] env[62585]: DEBUG nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 513.025667] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e5b859-35a6-44b4-9574-503eaae8a995 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.035464] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1779cd67-c507-4562-a298-9da337758fe7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.073270] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04718bf4-5c22-47cf-a395-b2f685d5eb1d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.080804] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b1de96-a505-45c9-b861-eff847aa93de {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.096081] env[62585]: DEBUG nova.compute.provider_tree [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 513.099527] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Getting list of instances from cluster (obj){ [ 513.099527] env[62585]: value = "domain-c8" [ 513.099527] env[62585]: _type = "ClusterComputeResource" [ 513.099527] env[62585]: } {{(pid=62585) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 513.100865] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf2ada8-1bac-44e3-9035-f11e75680a7a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.110435] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Got total of 0 instances {{(pid=62585) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 513.110435] env[62585]: WARNING nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] While synchronizing instance power states, found 4 instances in the database and 0 instances on the hypervisor. 
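The warning that closes the record above comes from the ComputeManager._sync_power_states periodic task: the driver just listed the VMs actually present on the vCenter cluster (zero, since all four instances are still building) and that count is compared against the instances the database assigns to this host. A minimal Python sketch of that comparison follows; db_instances and hypervisor_uuids are illustrative names for the database instance list and the driver-reported VM UUID set, not Nova's own code.

    def sync_power_states(db_instances, hypervisor_uuids):
        # Instances Nova tracks for this host that the hypervisor does not report.
        missing = [inst for inst in db_instances if inst.uuid not in hypervisor_uuids]
        if len(db_instances) != len(hypervisor_uuids):
            print("WARNING: found %d instances in the database and %d on the hypervisor"
                  % (len(db_instances), len(hypervisor_uuids)))
        # Each instance is then reconciled individually under its own lock, which is
        # why the per-UUID "Triggering sync" / "Acquiring lock" lines follow below.
        return missing

Here every instance lands in `missing`, but each is still in the Building state, so the subsequent per-instance sync is effectively a no-op.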
[ 513.110569] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Triggering sync for uuid d8d432cc-07e0-4aac-9ad0-88a621173835 {{(pid=62585) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 513.110717] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Triggering sync for uuid bc1b883b-32e4-45a8-b785-0eb53bbd7ae9 {{(pid=62585) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 513.110866] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Triggering sync for uuid 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8 {{(pid=62585) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 513.111343] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Triggering sync for uuid 40fd1fff-1df0-43b6-9cce-a666ecd63199 {{(pid=62585) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 513.111343] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "d8d432cc-07e0-4aac-9ad0-88a621173835" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.111523] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "bc1b883b-32e4-45a8-b785-0eb53bbd7ae9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.111714] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "1a7356c7-1442-4de3-8a1f-04fc1bfb03b8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.111892] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "40fd1fff-1df0-43b6-9cce-a666ecd63199" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.112074] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 513.112399] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Getting list of instances from cluster (obj){ [ 513.112399] env[62585]: value = "domain-c8" [ 513.112399] env[62585]: _type = "ClusterComputeResource" [ 513.112399] env[62585]: } {{(pid=62585) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 513.114906] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4e44e3-96f1-4118-bfcb-2046eb7af8bc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.120384] env[62585]: ERROR nova.compute.manager [None 
req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port dfee4c92-ac3f-4c83-b53e-451425f039b1, please check neutron logs for more information. [ 513.120384] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 513.120384] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 513.120384] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 513.120384] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 513.120384] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 513.120384] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 513.120384] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 513.120384] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 513.120384] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 513.120384] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 513.120384] env[62585]: ERROR nova.compute.manager raise self.value [ 513.120384] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 513.120384] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 513.120384] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 513.120384] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 513.120933] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 513.120933] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 513.120933] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port dfee4c92-ac3f-4c83-b53e-451425f039b1, please check neutron logs for more information. 
[ 513.120933] env[62585]: ERROR nova.compute.manager [ 513.120933] env[62585]: Traceback (most recent call last): [ 513.120933] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 513.120933] env[62585]: listener.cb(fileno) [ 513.120933] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 513.120933] env[62585]: result = function(*args, **kwargs) [ 513.120933] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 513.120933] env[62585]: return func(*args, **kwargs) [ 513.120933] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 513.120933] env[62585]: raise e [ 513.120933] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 513.120933] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 513.120933] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 513.120933] env[62585]: created_port_ids = self._update_ports_for_instance( [ 513.120933] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 513.120933] env[62585]: with excutils.save_and_reraise_exception(): [ 513.120933] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 513.120933] env[62585]: self.force_reraise() [ 513.120933] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 513.120933] env[62585]: raise self.value [ 513.120933] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 513.120933] env[62585]: updated_port = self._update_port( [ 513.120933] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 513.120933] env[62585]: _ensure_no_port_binding_failure(port) [ 513.120933] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 513.120933] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 513.121952] env[62585]: nova.exception.PortBindingFailed: Binding failed for port dfee4c92-ac3f-4c83-b53e-451425f039b1, please check neutron logs for more information. [ 513.121952] env[62585]: Removing descriptor: 15 [ 513.121952] env[62585]: ERROR nova.compute.manager [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port dfee4c92-ac3f-4c83-b53e-451425f039b1, please check neutron logs for more information. 
[ 513.121952] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Traceback (most recent call last): [ 513.121952] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 513.121952] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] yield resources [ 513.121952] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 513.121952] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] self.driver.spawn(context, instance, image_meta, [ 513.121952] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 513.121952] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] self._vmops.spawn(context, instance, image_meta, injected_files, [ 513.121952] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 513.121952] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] vm_ref = self.build_virtual_machine(instance, [ 513.122276] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 513.122276] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] vif_infos = vmwarevif.get_vif_info(self._session, [ 513.122276] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 513.122276] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] for vif in network_info: [ 513.122276] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 513.122276] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] return self._sync_wrapper(fn, *args, **kwargs) [ 513.122276] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 513.122276] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] self.wait() [ 513.122276] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 513.122276] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] self[:] = self._gt.wait() [ 513.122276] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 513.122276] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] return self._exit_event.wait() [ 513.122276] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 513.122639] env[62585]: ERROR 
nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] result = hub.switch() [ 513.122639] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 513.122639] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] return self.greenlet.switch() [ 513.122639] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 513.122639] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] result = function(*args, **kwargs) [ 513.122639] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 513.122639] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] return func(*args, **kwargs) [ 513.122639] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 513.122639] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] raise e [ 513.122639] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 513.122639] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] nwinfo = self.network_api.allocate_for_instance( [ 513.122639] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 513.122639] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] created_port_ids = self._update_ports_for_instance( [ 513.122977] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 513.122977] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] with excutils.save_and_reraise_exception(): [ 513.122977] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 513.122977] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] self.force_reraise() [ 513.122977] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 513.122977] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] raise self.value [ 513.122977] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 513.122977] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] updated_port = self._update_port( [ 513.122977] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 513.122977] 
env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] _ensure_no_port_binding_failure(port) [ 513.122977] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 513.122977] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] raise exception.PortBindingFailed(port_id=port['id']) [ 513.123285] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] nova.exception.PortBindingFailed: Binding failed for port dfee4c92-ac3f-4c83-b53e-451425f039b1, please check neutron logs for more information. [ 513.123285] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] [ 513.123285] env[62585]: INFO nova.compute.manager [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Terminating instance [ 513.126929] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Acquiring lock "refresh_cache-d8d432cc-07e0-4aac-9ad0-88a621173835" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 513.127225] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Acquired lock "refresh_cache-d8d432cc-07e0-4aac-9ad0-88a621173835" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 513.128069] env[62585]: DEBUG nova.network.neutron [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 513.140988] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Got total of 0 instances {{(pid=62585) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 513.246716] env[62585]: DEBUG oslo_concurrency.lockutils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.358226] env[62585]: DEBUG nova.network.neutron [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Successfully created port: 83c2aeb0-e3b0-45b2-9ad0-a3a8a8d5c3c1 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 513.598398] env[62585]: DEBUG nova.scheduler.client.report [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Inventory has not changed for provider 
66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 513.724733] env[62585]: DEBUG nova.network.neutron [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 513.916857] env[62585]: DEBUG nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 513.945220] env[62585]: DEBUG nova.virt.hardware [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 513.945220] env[62585]: DEBUG nova.virt.hardware [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 513.945220] env[62585]: DEBUG nova.virt.hardware [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 513.945636] env[62585]: DEBUG nova.virt.hardware [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 513.945636] env[62585]: DEBUG nova.virt.hardware [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 513.945636] env[62585]: DEBUG nova.virt.hardware [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 513.945636] env[62585]: DEBUG nova.virt.hardware [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 513.945636] env[62585]: DEBUG nova.virt.hardware [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 513.946320] env[62585]: DEBUG nova.virt.hardware [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 513.946766] env[62585]: DEBUG nova.virt.hardware [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 513.947243] env[62585]: DEBUG nova.virt.hardware [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 513.948359] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a250b1e-870c-4bdf-b6e8-d10ed99a7439 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.964262] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a772aa-5775-4d00-b02a-b564c902aef6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.018838] env[62585]: DEBUG nova.network.neutron [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 514.090159] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 514.091512] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None 
None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 514.091512] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Starting heal instance info cache {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 514.091512] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Rebuilding the list of instances to heal {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 514.105571] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.231s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 514.106172] env[62585]: DEBUG nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 514.111947] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.448s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 514.113526] env[62585]: INFO nova.compute.claims [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 514.244757] env[62585]: DEBUG nova.compute.manager [req-d7d8dcce-e5eb-4466-8875-c04715f0cf78 req-bce57ae9-1224-4f7d-9cd2-f19e7a37c575 service nova] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Received event network-changed-dfee4c92-ac3f-4c83-b53e-451425f039b1 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 514.244910] env[62585]: DEBUG nova.compute.manager [req-d7d8dcce-e5eb-4466-8875-c04715f0cf78 req-bce57ae9-1224-4f7d-9cd2-f19e7a37c575 service nova] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Refreshing instance network info cache due to event network-changed-dfee4c92-ac3f-4c83-b53e-451425f039b1. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 514.245412] env[62585]: DEBUG oslo_concurrency.lockutils [req-d7d8dcce-e5eb-4466-8875-c04715f0cf78 req-bce57ae9-1224-4f7d-9cd2-f19e7a37c575 service nova] Acquiring lock "refresh_cache-d8d432cc-07e0-4aac-9ad0-88a621173835" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 514.523731] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Releasing lock "refresh_cache-d8d432cc-07e0-4aac-9ad0-88a621173835" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 514.524649] env[62585]: DEBUG nova.compute.manager [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 514.528255] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 514.528255] env[62585]: DEBUG oslo_concurrency.lockutils [req-d7d8dcce-e5eb-4466-8875-c04715f0cf78 req-bce57ae9-1224-4f7d-9cd2-f19e7a37c575 service nova] Acquired lock "refresh_cache-d8d432cc-07e0-4aac-9ad0-88a621173835" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 514.528255] env[62585]: DEBUG nova.network.neutron [req-d7d8dcce-e5eb-4466-8875-c04715f0cf78 req-bce57ae9-1224-4f7d-9cd2-f19e7a37c575 service nova] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Refreshing network info cache for port dfee4c92-ac3f-4c83-b53e-451425f039b1 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 514.531576] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5c180c0d-cf96-4eea-8afd-93bb8f0adc14 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.541941] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfd7518-1841-472b-aad7-7e9efe6e53c7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.570543] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d8d432cc-07e0-4aac-9ad0-88a621173835 could not be found. 
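The PortBindingFailed tracebacks above terminate in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which rejects a port whose binding Neutron could not complete; the instance is then torn down before it ever reaches vCenter, which is why the destroy path here hits InstanceNotFound on the backend. A minimal sketch of that check, assuming `port` is the port dict returned by the Neutron API (the constant and exception definitions are illustrative, not Nova's source):

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port it could not bind to any host / mechanism driver
        # with binding:vif_type = 'binding_failed'; Nova converts that into the
        # PortBindingFailed exception seen in the tracebacks above.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

In practice the follow-up is on the Neutron side: inspect the port binding (e.g. `openstack port show <port-id> -c binding_vif_type -c binding_host_id`, if the port still exists; Nova deallocates it during the teardown shown here) and check the neutron-server and L2 agent logs for this compute host.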
[ 514.570672] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 514.571095] env[62585]: INFO nova.compute.manager [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Took 0.04 seconds to destroy the instance on the hypervisor. [ 514.571350] env[62585]: DEBUG oslo.service.loopingcall [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 514.571553] env[62585]: DEBUG nova.compute.manager [-] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 514.571643] env[62585]: DEBUG nova.network.neutron [-] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 514.596151] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 514.596244] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 514.596364] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 514.596489] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 514.596610] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Didn't find any instances for network info cache update. 
{{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 514.603060] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 514.603619] env[62585]: DEBUG oslo_concurrency.lockutils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Acquiring lock "106dca6d-1ddf-4315-b645-c52c7c59f5d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.603880] env[62585]: DEBUG oslo_concurrency.lockutils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Lock "106dca6d-1ddf-4315-b645-c52c7c59f5d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 514.604884] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 514.605522] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 514.605733] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 514.605919] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 514.606126] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 514.606321] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62585) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 514.606475] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 514.618961] env[62585]: DEBUG nova.compute.utils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 514.623176] env[62585]: DEBUG nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 514.623274] env[62585]: DEBUG nova.network.neutron [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 514.635640] env[62585]: DEBUG nova.network.neutron [-] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 514.862769] env[62585]: DEBUG nova.policy [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5db548f8d8ee4db383f03d417c896a65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b11f7597d0aa4cb28fed83803589041b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 515.065707] env[62585]: DEBUG nova.network.neutron [req-d7d8dcce-e5eb-4466-8875-c04715f0cf78 req-bce57ae9-1224-4f7d-9cd2-f19e7a37c575 service nova] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 515.107927] env[62585]: DEBUG nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 515.112694] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.130911] env[62585]: DEBUG nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 515.141079] env[62585]: DEBUG nova.network.neutron [-] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 515.145655] env[62585]: ERROR nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d73950ff-ebe1-4fc5-8f22-da4a90362254, please check neutron logs for more information. [ 515.145655] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 515.145655] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 515.145655] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 515.145655] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 515.145655] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 515.145655] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 515.145655] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 515.145655] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 515.145655] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 515.145655] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 515.145655] env[62585]: ERROR nova.compute.manager raise self.value [ 515.145655] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 515.145655] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 515.145655] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 515.145655] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 515.146165] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 515.146165] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 515.146165] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 
d73950ff-ebe1-4fc5-8f22-da4a90362254, please check neutron logs for more information. [ 515.146165] env[62585]: ERROR nova.compute.manager [ 515.146165] env[62585]: Traceback (most recent call last): [ 515.146165] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 515.146165] env[62585]: listener.cb(fileno) [ 515.146165] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 515.146165] env[62585]: result = function(*args, **kwargs) [ 515.146165] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 515.146165] env[62585]: return func(*args, **kwargs) [ 515.146165] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 515.146165] env[62585]: raise e [ 515.146165] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 515.146165] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 515.146165] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 515.146165] env[62585]: created_port_ids = self._update_ports_for_instance( [ 515.146165] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 515.146165] env[62585]: with excutils.save_and_reraise_exception(): [ 515.146165] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 515.146165] env[62585]: self.force_reraise() [ 515.146165] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 515.146165] env[62585]: raise self.value [ 515.146165] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 515.146165] env[62585]: updated_port = self._update_port( [ 515.146165] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 515.146165] env[62585]: _ensure_no_port_binding_failure(port) [ 515.146165] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 515.146165] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 515.146910] env[62585]: nova.exception.PortBindingFailed: Binding failed for port d73950ff-ebe1-4fc5-8f22-da4a90362254, please check neutron logs for more information. [ 515.146910] env[62585]: Removing descriptor: 16 [ 515.146910] env[62585]: ERROR nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d73950ff-ebe1-4fc5-8f22-da4a90362254, please check neutron logs for more information. 
[ 515.146910] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Traceback (most recent call last): [ 515.146910] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 515.146910] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] yield resources [ 515.146910] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 515.146910] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] self.driver.spawn(context, instance, image_meta, [ 515.146910] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 515.146910] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 515.146910] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 515.146910] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] vm_ref = self.build_virtual_machine(instance, [ 515.147287] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 515.147287] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] vif_infos = vmwarevif.get_vif_info(self._session, [ 515.147287] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 515.147287] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] for vif in network_info: [ 515.147287] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 515.147287] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] return self._sync_wrapper(fn, *args, **kwargs) [ 515.147287] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 515.147287] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] self.wait() [ 515.147287] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 515.147287] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] self[:] = self._gt.wait() [ 515.147287] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 515.147287] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] return self._exit_event.wait() [ 515.147287] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 515.147626] env[62585]: ERROR 
nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] result = hub.switch() [ 515.147626] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 515.147626] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] return self.greenlet.switch() [ 515.147626] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 515.147626] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] result = function(*args, **kwargs) [ 515.147626] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 515.147626] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] return func(*args, **kwargs) [ 515.147626] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 515.147626] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] raise e [ 515.147626] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 515.147626] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] nwinfo = self.network_api.allocate_for_instance( [ 515.147626] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 515.147626] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] created_port_ids = self._update_ports_for_instance( [ 515.147956] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 515.147956] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] with excutils.save_and_reraise_exception(): [ 515.147956] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 515.147956] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] self.force_reraise() [ 515.147956] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 515.147956] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] raise self.value [ 515.147956] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 515.147956] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] updated_port = self._update_port( [ 515.147956] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 515.147956] 
env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] _ensure_no_port_binding_failure(port) [ 515.147956] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 515.147956] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] raise exception.PortBindingFailed(port_id=port['id']) [ 515.148296] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] nova.exception.PortBindingFailed: Binding failed for port d73950ff-ebe1-4fc5-8f22-da4a90362254, please check neutron logs for more information. [ 515.148296] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] [ 515.148296] env[62585]: INFO nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Terminating instance [ 515.149475] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Acquiring lock "refresh_cache-bc1b883b-32e4-45a8-b785-0eb53bbd7ae9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 515.149882] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Acquired lock "refresh_cache-bc1b883b-32e4-45a8-b785-0eb53bbd7ae9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 515.150111] env[62585]: DEBUG nova.network.neutron [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 515.248283] env[62585]: DEBUG nova.network.neutron [req-d7d8dcce-e5eb-4466-8875-c04715f0cf78 req-bce57ae9-1224-4f7d-9cd2-f19e7a37c575 service nova] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 515.471403] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b16682f-34ca-44d6-bc7f-765c8315e40a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.481324] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b56674-3ca9-41e6-b8eb-fc0f2fc22ff3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.521775] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d63b98-159b-4d75-b841-e710dbd7c624 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.533635] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-78b87ccd-fbf9-404d-a918-07d7118f0b04 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.548843] env[62585]: DEBUG nova.compute.provider_tree [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 515.621451] env[62585]: DEBUG oslo_concurrency.lockutils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Acquiring lock "598c7b4f-8239-45af-8bc5-caf6b47172ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.622965] env[62585]: DEBUG oslo_concurrency.lockutils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Lock "598c7b4f-8239-45af-8bc5-caf6b47172ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 515.647686] env[62585]: DEBUG oslo_concurrency.lockutils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.650540] env[62585]: INFO nova.compute.manager [-] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Took 1.08 seconds to deallocate network for instance. [ 515.652561] env[62585]: DEBUG nova.compute.claims [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 515.652561] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.729118] env[62585]: DEBUG nova.network.neutron [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 515.754183] env[62585]: DEBUG oslo_concurrency.lockutils [req-d7d8dcce-e5eb-4466-8875-c04715f0cf78 req-bce57ae9-1224-4f7d-9cd2-f19e7a37c575 service nova] Releasing lock "refresh_cache-d8d432cc-07e0-4aac-9ad0-88a621173835" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 515.936401] env[62585]: DEBUG oslo_concurrency.lockutils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Acquiring lock "3d05d741-1b46-4646-8269-f72dc6ad5cbd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.936401] env[62585]: DEBUG oslo_concurrency.lockutils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Lock "3d05d741-1b46-4646-8269-f72dc6ad5cbd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 515.942009] env[62585]: DEBUG nova.network.neutron [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 516.056468] env[62585]: DEBUG nova.scheduler.client.report [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 516.127559] env[62585]: DEBUG nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 516.147893] env[62585]: DEBUG nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 516.191123] env[62585]: DEBUG nova.virt.hardware [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 516.191404] env[62585]: DEBUG nova.virt.hardware [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 516.191558] env[62585]: DEBUG nova.virt.hardware [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 516.191741] env[62585]: DEBUG nova.virt.hardware [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 516.192276] env[62585]: DEBUG nova.virt.hardware [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 516.193073] env[62585]: DEBUG nova.virt.hardware [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 516.194859] env[62585]: DEBUG nova.virt.hardware [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 516.194944] env[62585]: DEBUG nova.virt.hardware [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 516.195178] env[62585]: DEBUG nova.virt.hardware [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 
tempest-MigrationsAdminTest-99143934-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 516.195344] env[62585]: DEBUG nova.virt.hardware [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 516.195625] env[62585]: DEBUG nova.virt.hardware [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 516.196793] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d0a2cf-bc05-49e3-b7bd-a156e9db100b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.212447] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72457d90-a314-4c27-a2d9-41545f8dc2d9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.291251] env[62585]: DEBUG nova.network.neutron [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Successfully created port: 7f49104d-4e7d-451d-b8e9-a605cabd3b1a {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 516.440510] env[62585]: DEBUG nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 516.447877] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Releasing lock "refresh_cache-bc1b883b-32e4-45a8-b785-0eb53bbd7ae9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 516.449677] env[62585]: DEBUG nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 516.449677] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 516.449677] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6647fa9d-9aea-4306-be79-a15f208721df {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.457801] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fcd88fb-d1f1-41af-a7bc-c87f64ee73e2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.489334] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bc1b883b-32e4-45a8-b785-0eb53bbd7ae9 could not be found. [ 516.489334] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 516.489334] env[62585]: INFO nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 516.489334] env[62585]: DEBUG oslo.service.loopingcall [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 516.489334] env[62585]: DEBUG nova.compute.manager [-] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 516.489334] env[62585]: DEBUG nova.network.neutron [-] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 516.521711] env[62585]: DEBUG nova.network.neutron [-] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 516.562302] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.450s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 516.562843] env[62585]: DEBUG nova.compute.manager [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 516.565816] env[62585]: DEBUG oslo_concurrency.lockutils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.120s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.567634] env[62585]: INFO nova.compute.claims [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 516.664407] env[62585]: DEBUG oslo_concurrency.lockutils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.970047] env[62585]: DEBUG oslo_concurrency.lockutils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.025352] env[62585]: DEBUG nova.network.neutron [-] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 517.074130] env[62585]: DEBUG nova.compute.utils [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 517.078470] env[62585]: DEBUG nova.compute.manager [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Not allocating networking since 'none' was specified. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 517.529319] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquiring lock "dd387320-7101-440c-80bc-a7d19a654df8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.529658] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Lock "dd387320-7101-440c-80bc-a7d19a654df8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.530013] env[62585]: INFO nova.compute.manager [-] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Took 1.04 seconds to deallocate network for instance. [ 517.534345] env[62585]: DEBUG nova.compute.claims [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 517.534345] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.579143] env[62585]: DEBUG nova.compute.manager [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 517.587457] env[62585]: DEBUG nova.compute.manager [req-9f4f28f1-5f0f-485a-b094-9d0604ae2076 req-037b26c5-ba64-4e19-b5e4-8919d4ae4803 service nova] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Received event network-vif-deleted-dfee4c92-ac3f-4c83-b53e-451425f039b1 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 517.587627] env[62585]: DEBUG nova.compute.manager [req-9f4f28f1-5f0f-485a-b094-9d0604ae2076 req-037b26c5-ba64-4e19-b5e4-8919d4ae4803 service nova] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Received event network-changed-d73950ff-ebe1-4fc5-8f22-da4a90362254 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 517.587863] env[62585]: DEBUG nova.compute.manager [req-9f4f28f1-5f0f-485a-b094-9d0604ae2076 req-037b26c5-ba64-4e19-b5e4-8919d4ae4803 service nova] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Refreshing instance network info cache due to event network-changed-d73950ff-ebe1-4fc5-8f22-da4a90362254. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 517.587995] env[62585]: DEBUG oslo_concurrency.lockutils [req-9f4f28f1-5f0f-485a-b094-9d0604ae2076 req-037b26c5-ba64-4e19-b5e4-8919d4ae4803 service nova] Acquiring lock "refresh_cache-bc1b883b-32e4-45a8-b785-0eb53bbd7ae9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 517.588227] env[62585]: DEBUG oslo_concurrency.lockutils [req-9f4f28f1-5f0f-485a-b094-9d0604ae2076 req-037b26c5-ba64-4e19-b5e4-8919d4ae4803 service nova] Acquired lock "refresh_cache-bc1b883b-32e4-45a8-b785-0eb53bbd7ae9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 517.588329] env[62585]: DEBUG nova.network.neutron [req-9f4f28f1-5f0f-485a-b094-9d0604ae2076 req-037b26c5-ba64-4e19-b5e4-8919d4ae4803 service nova] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Refreshing network info cache for port d73950ff-ebe1-4fc5-8f22-da4a90362254 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 517.807585] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80e4f72-b52c-43dc-9a14-3b40cafee0a8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.818736] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e03c182-7fa3-41e4-8017-7eca526b2328 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.856804] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5ad13e-29b7-4d4a-964f-a8649c241fb7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.869630] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746ee152-c462-460d-aa37-1dd9071d3c39 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.887181] env[62585]: DEBUG nova.compute.provider_tree [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 518.170027] env[62585]: DEBUG nova.network.neutron [req-9f4f28f1-5f0f-485a-b094-9d0604ae2076 req-037b26c5-ba64-4e19-b5e4-8919d4ae4803 service nova] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 518.197653] env[62585]: ERROR nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 83c2aeb0-e3b0-45b2-9ad0-a3a8a8d5c3c1, please check neutron logs for more information. 
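The refresh_cache-<uuid> lock records above show how network-info cache updates are serialized per instance: the external network-changed event handler and the build path both take the same named lock before touching the cache. A rough sketch of that pattern with oslo.concurrency (the lock-name prefix and the empty network_info mirror the log; the helper itself is illustrative, not Nova's code):

    from oslo_concurrency import lockutils

    # In-memory stand-in for the instance_info_cache rows seen in the log
    # ("Updating instance_info_cache with network_info: []").
    _nw_info_cache = {}


    def refresh_instance_nw_info(instance_uuid, fetch_nw_info):
        # One named lock per instance, mirroring the
        # 'refresh_cache-<instance uuid>' lock names in the log records above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            nw_info = fetch_nw_info(instance_uuid)
            _nw_info_cache[instance_uuid] = nw_info
            return nw_info


    # An event handler and the build path may call this concurrently for the
    # same instance; the named lock serializes their cache updates.
    refresh_instance_nw_info('bc1b883b-32e4-45a8-b785-0eb53bbd7ae9', lambda _u: [])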
[ 518.197653] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 518.197653] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 518.197653] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 518.197653] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 518.197653] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 518.197653] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 518.197653] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 518.197653] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 518.197653] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 518.197653] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 518.197653] env[62585]: ERROR nova.compute.manager raise self.value [ 518.197653] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 518.197653] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 518.197653] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 518.197653] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 518.198094] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 518.198094] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 518.198094] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 83c2aeb0-e3b0-45b2-9ad0-a3a8a8d5c3c1, please check neutron logs for more information. 
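The nova.virt.hardware records earlier in this section ("Build topologies for 1 vcpu(s) 1:1:1 ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") are the topology search for the m1.nano flavor. A toy version of that enumeration, under the simplifying assumption that a valid topology is any sockets*cores*threads factorization of the vCPU count within the limits (an illustration, not Nova's _get_possible_cpu_topologies):

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                                max_threads=65536):
        """Enumerate sockets*cores*threads factorizations of vcpus within limits."""
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
        return topologies


    # For the 1-vCPU m1.nano flavor this yields the single topology the log
    # shows: [VirtCPUTopology(sockets=1, cores=1, threads=1)]
    print(possible_cpu_topologies(1))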
[ 518.198094] env[62585]: ERROR nova.compute.manager [ 518.198254] env[62585]: Traceback (most recent call last): [ 518.198254] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 518.198254] env[62585]: listener.cb(fileno) [ 518.198254] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 518.198254] env[62585]: result = function(*args, **kwargs) [ 518.198254] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 518.198254] env[62585]: return func(*args, **kwargs) [ 518.198254] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 518.198254] env[62585]: raise e [ 518.198254] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 518.198254] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 518.198254] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 518.198254] env[62585]: created_port_ids = self._update_ports_for_instance( [ 518.198254] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 518.198254] env[62585]: with excutils.save_and_reraise_exception(): [ 518.198254] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 518.198254] env[62585]: self.force_reraise() [ 518.198254] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 518.198254] env[62585]: raise self.value [ 518.198254] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 518.198254] env[62585]: updated_port = self._update_port( [ 518.198254] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 518.198254] env[62585]: _ensure_no_port_binding_failure(port) [ 518.198254] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 518.198254] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 518.198254] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 83c2aeb0-e3b0-45b2-9ad0-a3a8a8d5c3c1, please check neutron logs for more information. [ 518.198254] env[62585]: Removing descriptor: 17 [ 518.200341] env[62585]: ERROR nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 83c2aeb0-e3b0-45b2-9ad0-a3a8a8d5c3c1, please check neutron logs for more information. 
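The instance-level traceback that follows (like the one for bc1b883b-32e4-45a8-b785-0eb53bbd7ae9 above) shows why the failure surfaces inside the VMware driver rather than in the allocation call itself: network allocation runs in a greenthread, and the result is only awaited when get_vif_info first iterates the network_info wrapper. A minimal sketch of that lazy-wrapper idea using eventlet (the class name mirrors Nova's NetworkInfoAsyncWrapper, but the code is illustrative only):

    import eventlet


    class NetworkInfoAsyncWrapper:
        """Defer a network allocation running in a greenthread until first use."""

        def __init__(self, allocate_fn, *args, **kwargs):
            self._gt = eventlet.spawn(allocate_fn, *args, **kwargs)
            self._result = None

        def wait(self):
            if self._result is None:
                # GreenThread.wait() re-raises any exception raised inside the
                # greenthread (e.g. a port binding failure) at this point.
                self._result = self._gt.wait()
            return self._result

        def __iter__(self):
            # Iterating the wrapper, as the driver does with network_info,
            # is what finally blocks on, and re-raises from, the allocation.
            return iter(self.wait())


    def allocate(ok):
        if not ok:
            raise RuntimeError('Binding failed')  # stand-in for PortBindingFailed
        return [{'id': 'port-1'}]


    nw_info = NetworkInfoAsyncWrapper(allocate, True)
    print(list(nw_info))   # -> [{'id': 'port-1'}]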
[ 518.200341] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Traceback (most recent call last): [ 518.200341] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 518.200341] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] yield resources [ 518.200341] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 518.200341] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] self.driver.spawn(context, instance, image_meta, [ 518.200341] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 518.200341] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 518.200341] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 518.200341] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] vm_ref = self.build_virtual_machine(instance, [ 518.200341] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 518.200718] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] vif_infos = vmwarevif.get_vif_info(self._session, [ 518.200718] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 518.200718] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] for vif in network_info: [ 518.200718] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 518.200718] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] return self._sync_wrapper(fn, *args, **kwargs) [ 518.200718] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 518.200718] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] self.wait() [ 518.200718] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 518.200718] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] self[:] = self._gt.wait() [ 518.200718] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 518.200718] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] return self._exit_event.wait() [ 518.200718] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 518.200718] env[62585]: ERROR 
nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] result = hub.switch() [ 518.201063] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 518.201063] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] return self.greenlet.switch() [ 518.201063] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 518.201063] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] result = function(*args, **kwargs) [ 518.201063] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 518.201063] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] return func(*args, **kwargs) [ 518.201063] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 518.201063] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] raise e [ 518.201063] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 518.201063] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] nwinfo = self.network_api.allocate_for_instance( [ 518.201063] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 518.201063] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] created_port_ids = self._update_ports_for_instance( [ 518.201063] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 518.201461] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] with excutils.save_and_reraise_exception(): [ 518.201461] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 518.201461] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] self.force_reraise() [ 518.201461] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 518.201461] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] raise self.value [ 518.201461] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 518.201461] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] updated_port = self._update_port( [ 518.201461] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 518.201461] 
env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] _ensure_no_port_binding_failure(port) [ 518.201461] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 518.201461] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] raise exception.PortBindingFailed(port_id=port['id']) [ 518.201461] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] nova.exception.PortBindingFailed: Binding failed for port 83c2aeb0-e3b0-45b2-9ad0-a3a8a8d5c3c1, please check neutron logs for more information. [ 518.201461] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] [ 518.201965] env[62585]: INFO nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Terminating instance [ 518.203773] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Acquiring lock "refresh_cache-1a7356c7-1442-4de3-8a1f-04fc1bfb03b8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 518.204049] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Acquired lock "refresh_cache-1a7356c7-1442-4de3-8a1f-04fc1bfb03b8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 518.204574] env[62585]: DEBUG nova.network.neutron [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 518.369573] env[62585]: DEBUG nova.network.neutron [req-9f4f28f1-5f0f-485a-b094-9d0604ae2076 req-037b26c5-ba64-4e19-b5e4-8919d4ae4803 service nova] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 518.391343] env[62585]: DEBUG nova.scheduler.client.report [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 518.417937] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] 
Acquiring lock "1531ed40-29c2-4812-afd5-eabffe22f4ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.418227] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Lock "1531ed40-29c2-4812-afd5-eabffe22f4ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.444150] env[62585]: DEBUG nova.compute.manager [req-6f960f39-9bb8-48cc-9e45-23c65847bfed req-96908dce-ed68-4071-9c4b-9587f11ad230 service nova] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Received event network-changed-83c2aeb0-e3b0-45b2-9ad0-a3a8a8d5c3c1 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 518.444331] env[62585]: DEBUG nova.compute.manager [req-6f960f39-9bb8-48cc-9e45-23c65847bfed req-96908dce-ed68-4071-9c4b-9587f11ad230 service nova] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Refreshing instance network info cache due to event network-changed-83c2aeb0-e3b0-45b2-9ad0-a3a8a8d5c3c1. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 518.444510] env[62585]: DEBUG oslo_concurrency.lockutils [req-6f960f39-9bb8-48cc-9e45-23c65847bfed req-96908dce-ed68-4071-9c4b-9587f11ad230 service nova] Acquiring lock "refresh_cache-1a7356c7-1442-4de3-8a1f-04fc1bfb03b8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 518.601988] env[62585]: DEBUG nova.compute.manager [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 518.640783] env[62585]: DEBUG nova.virt.hardware [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 518.641054] env[62585]: DEBUG nova.virt.hardware [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 518.641208] env[62585]: DEBUG nova.virt.hardware [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 518.641386] env[62585]: DEBUG nova.virt.hardware [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 518.641547] env[62585]: DEBUG nova.virt.hardware [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 518.641964] env[62585]: DEBUG nova.virt.hardware [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 518.642237] env[62585]: DEBUG nova.virt.hardware [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 518.642595] env[62585]: DEBUG nova.virt.hardware [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 518.642786] 
env[62585]: DEBUG nova.virt.hardware [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 518.643063] env[62585]: DEBUG nova.virt.hardware [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 518.643138] env[62585]: DEBUG nova.virt.hardware [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 518.644048] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7c72a8-6e83-40d3-93f7-2c24505c79fc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.655404] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74619fcb-a317-467f-8d30-d62d29f70c92 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.670715] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Instance VIF info [] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 518.681394] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 518.681569] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22e19fef-f722-4b26-a91d-0c03b81eebfe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.696395] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Created folder: OpenStack in parent group-v4. [ 518.696601] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Creating folder: Project (b2e7fd0da2824a509df4cd5734d83a2e). Parent ref: group-v293962. 
{{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 518.697247] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f3fe8a0-dd9f-43dc-9cbd-3d06a71d2fb9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.706232] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Created folder: Project (b2e7fd0da2824a509df4cd5734d83a2e) in parent group-v293962. [ 518.706519] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Creating folder: Instances. Parent ref: group-v293963. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 518.707303] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc072493-b5a6-4c21-9c84-14f67329a921 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.722058] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Created folder: Instances in parent group-v293963. [ 518.722304] env[62585]: DEBUG oslo.service.loopingcall [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 518.722492] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 518.722689] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cf1de3e-eb75-4423-9ea6-cb003ddccf34 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.744318] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 518.744318] env[62585]: value = "task-1384576" [ 518.744318] env[62585]: _type = "Task" [ 518.744318] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 518.753898] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384576, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 518.783644] env[62585]: DEBUG nova.network.neutron [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 518.874187] env[62585]: DEBUG oslo_concurrency.lockutils [req-9f4f28f1-5f0f-485a-b094-9d0604ae2076 req-037b26c5-ba64-4e19-b5e4-8919d4ae4803 service nova] Releasing lock "refresh_cache-bc1b883b-32e4-45a8-b785-0eb53bbd7ae9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 518.874187] env[62585]: DEBUG nova.compute.manager [req-9f4f28f1-5f0f-485a-b094-9d0604ae2076 req-037b26c5-ba64-4e19-b5e4-8919d4ae4803 service nova] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Received event network-vif-deleted-d73950ff-ebe1-4fc5-8f22-da4a90362254 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 518.903087] env[62585]: DEBUG oslo_concurrency.lockutils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.336s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 518.904241] env[62585]: DEBUG nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 518.911570] env[62585]: DEBUG oslo_concurrency.lockutils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.663s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.914203] env[62585]: INFO nova.compute.claims [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 519.129802] env[62585]: DEBUG nova.network.neutron [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 519.259738] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384576, 'name': CreateVM_Task, 'duration_secs': 0.382907} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 519.260210] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 519.261105] env[62585]: DEBUG oslo_vmware.service [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8a33a9-3b1c-479f-acc1-75965f174eea {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.273478] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 519.273648] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 519.274424] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 519.274753] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-320d7c20-52cb-4413-aef1-69d3d87c07ff {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.280211] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for the task: (returnval){ [ 519.280211] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52549bd7-ebd8-b767-1023-b5c9b87e81ba" [ 519.280211] env[62585]: _type = "Task" [ 519.280211] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 519.300495] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 519.300495] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 519.301555] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 519.301555] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 519.301555] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 519.302029] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e70385f-4ec6-4884-be30-4e35826e5326 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.319106] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 519.319106] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 519.319815] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c161134-0d31-4388-8c94-b7d83c1bf5f4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.330511] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a549455-aa5f-46dd-965f-6a53116c1fca {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.336257] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for the task: (returnval){ [ 519.336257] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52eee624-8c04-6581-5627-cb2f052d96f8" [ 519.336257] env[62585]: _type = "Task" [ 519.336257] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 519.349112] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52eee624-8c04-6581-5627-cb2f052d96f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 519.423758] env[62585]: DEBUG nova.compute.utils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 519.431325] env[62585]: DEBUG nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 519.431325] env[62585]: DEBUG nova.network.neutron [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 519.635416] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Releasing lock "refresh_cache-1a7356c7-1442-4de3-8a1f-04fc1bfb03b8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 519.636480] env[62585]: DEBUG nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 519.636827] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 519.637134] env[62585]: DEBUG oslo_concurrency.lockutils [req-6f960f39-9bb8-48cc-9e45-23c65847bfed req-96908dce-ed68-4071-9c4b-9587f11ad230 service nova] Acquired lock "refresh_cache-1a7356c7-1442-4de3-8a1f-04fc1bfb03b8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 519.637324] env[62585]: DEBUG nova.network.neutron [req-6f960f39-9bb8-48cc-9e45-23c65847bfed req-96908dce-ed68-4071-9c4b-9587f11ad230 service nova] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Refreshing network info cache for port 83c2aeb0-e3b0-45b2-9ad0-a3a8a8d5c3c1 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 519.638429] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e649aff-5a5c-4648-827b-7937176767c9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.649403] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003f1281-eb11-4139-8018-952668987622 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.682338] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8 could not be found. [ 519.682930] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 519.682930] env[62585]: INFO nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Took 0.05 seconds to destroy the instance on the hypervisor. [ 519.682930] env[62585]: DEBUG oslo.service.loopingcall [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 519.684428] env[62585]: DEBUG nova.compute.manager [-] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 519.684522] env[62585]: DEBUG nova.network.neutron [-] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 519.751448] env[62585]: DEBUG nova.network.neutron [-] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 519.799629] env[62585]: DEBUG nova.policy [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f55fe7fb398647729cae1110873e110d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4f5ad7aa04a443bebf3c2a1556b16962', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 519.851214] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Preparing fetch location {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 519.854346] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Creating directory with path [datastore1] vmware_temp/ee4b9c00-b55a-488b-b89d-656dc0884347/790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 519.854346] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbafb09e-459f-4247-99cb-fcfbb9c855a4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.886200] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Created directory with path [datastore1] vmware_temp/ee4b9c00-b55a-488b-b89d-656dc0884347/790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 519.886200] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Fetch image to [datastore1] vmware_temp/ee4b9c00-b55a-488b-b89d-656dc0884347/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 519.886200] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 
tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Downloading image file data 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 to [datastore1] vmware_temp/ee4b9c00-b55a-488b-b89d-656dc0884347/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk on the data store datastore1 {{(pid=62585) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 519.886849] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9813002d-25fe-44b3-a5a5-94d80346d71e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.897916] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5908249a-6932-44ba-8eda-1a250fd4d9d6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.915861] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f6bc96-c3f2-4366-90d9-f76d6266f33c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.961311] env[62585]: DEBUG nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 519.974188] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a7ee8a-6b36-47e1-9413-dfdff9c33f80 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.983583] env[62585]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-01e1bcb4-1da9-4de7-9c7f-9ff5430ede01 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.049561] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquiring lock "149bd77b-9583-42e5-8c82-f795cac53b87" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.049843] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Lock "149bd77b-9583-42e5-8c82-f795cac53b87" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 520.073419] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Downloading image file data 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 to the data store datastore1 {{(pid=62585) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 
520.177180] env[62585]: DEBUG oslo_vmware.rw_handles [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ee4b9c00-b55a-488b-b89d-656dc0884347/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62585) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 520.256216] env[62585]: DEBUG nova.network.neutron [-] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 520.333199] env[62585]: DEBUG nova.network.neutron [req-6f960f39-9bb8-48cc-9e45-23c65847bfed req-96908dce-ed68-4071-9c4b-9587f11ad230 service nova] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 520.353988] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b73f67d-efe3-41d6-be01-47a2d4bbf13e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.379818] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8590ec7-edc4-41ce-b6dd-6d7d5809d640 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.433843] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d1b75d-f00e-4135-b089-acd4a3d13b3a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.446477] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa8fb70-0e0c-47e8-b6e4-db7431237fbb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.469475] env[62585]: DEBUG nova.compute.provider_tree [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 520.702716] env[62585]: DEBUG nova.network.neutron [req-6f960f39-9bb8-48cc-9e45-23c65847bfed req-96908dce-ed68-4071-9c4b-9587f11ad230 service nova] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 520.760256] env[62585]: INFO nova.compute.manager [-] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Took 1.08 seconds to deallocate network for instance. 
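The "Acquiring lock ..." / "acquired ... waited N.NNNs" / "released ... held N.NNNs" entries throughout this trace are emitted by oslo.concurrency's lockutils wrapper, which times how long a caller waited for a named lock and how long the critical section held it. A minimal sketch of that pattern, assuming illustrative function names (these stand-ins are not Nova's actual implementations):

from oslo_concurrency import lockutils

# Decorator form: the wrapper logs "Acquiring lock", "acquired ... waited"
# and "released ... held" around the decorated call, as seen for the
# "compute_resources" lock above.
@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Critical section: only one claimer at a time may update resource totals.
    return 'claimed %s' % instance_uuid

# Context-manager form: the per-instance build lock in this log is the same
# mechanism keyed by a dynamic name such as the instance UUID.
def locked_do_build_and_run_instance(instance_uuid):
    with lockutils.lock(instance_uuid):
        # build-and-run work would happen here while the lock is held
        pass

The "refresh_cache-<uuid>" and "[datastore1] devstack-image-cache_base/..." acquisitions above follow the context-manager form with a resource-specific lock name.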
[ 520.765073] env[62585]: DEBUG nova.compute.claims [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 520.765406] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.930360] env[62585]: DEBUG oslo_vmware.rw_handles [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Completed reading data from the image iterator. {{(pid=62585) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 520.930612] env[62585]: DEBUG oslo_vmware.rw_handles [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ee4b9c00-b55a-488b-b89d-656dc0884347/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62585) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 520.986019] env[62585]: DEBUG nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 520.988309] env[62585]: DEBUG nova.scheduler.client.report [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 521.002902] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Downloaded image file data 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 to vmware_temp/ee4b9c00-b55a-488b-b89d-656dc0884347/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk on the data store datastore1 {{(pid=62585) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 521.004814] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Caching image {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 521.004814] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Copying Virtual Disk [datastore1] vmware_temp/ee4b9c00-b55a-488b-b89d-656dc0884347/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk to [datastore1] vmware_temp/ee4b9c00-b55a-488b-b89d-656dc0884347/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 521.004971] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d447cbb-5f28-4689-9fe3-fcf9349013fe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.015688] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for the task: (returnval){ [ 521.015688] env[62585]: value = "task-1384577" [ 521.015688] env[62585]: _type = "Task" [ 521.015688] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 521.030392] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384577, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 521.034338] env[62585]: DEBUG nova.virt.hardware [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 521.035600] env[62585]: DEBUG nova.virt.hardware [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 521.035888] env[62585]: DEBUG nova.virt.hardware [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 521.036033] env[62585]: DEBUG nova.virt.hardware [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 521.036155] env[62585]: DEBUG nova.virt.hardware [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 521.037203] env[62585]: DEBUG nova.virt.hardware [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 521.037203] env[62585]: DEBUG nova.virt.hardware [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 521.037203] env[62585]: DEBUG nova.virt.hardware [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 521.037203] env[62585]: DEBUG nova.virt.hardware [None 
req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 521.037203] env[62585]: DEBUG nova.virt.hardware [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 521.037478] env[62585]: DEBUG nova.virt.hardware [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 521.038241] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74170b0-6843-4d66-870c-5031523016a8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.050675] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8af4755-25b3-4608-9ab8-d3025e10c2f3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.202790] env[62585]: DEBUG nova.compute.manager [req-82b46655-9ea2-41a3-8d4b-1bfff1a70cbe req-25b8778e-be08-453f-8659-b9c0bf0a8afa service nova] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Received event network-vif-deleted-83c2aeb0-e3b0-45b2-9ad0-a3a8a8d5c3c1 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 521.203037] env[62585]: DEBUG nova.compute.manager [req-82b46655-9ea2-41a3-8d4b-1bfff1a70cbe req-25b8778e-be08-453f-8659-b9c0bf0a8afa service nova] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Received event network-changed-7f49104d-4e7d-451d-b8e9-a605cabd3b1a {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 521.203168] env[62585]: DEBUG nova.compute.manager [req-82b46655-9ea2-41a3-8d4b-1bfff1a70cbe req-25b8778e-be08-453f-8659-b9c0bf0a8afa service nova] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Refreshing instance network info cache due to event network-changed-7f49104d-4e7d-451d-b8e9-a605cabd3b1a. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 521.203725] env[62585]: DEBUG oslo_concurrency.lockutils [req-82b46655-9ea2-41a3-8d4b-1bfff1a70cbe req-25b8778e-be08-453f-8659-b9c0bf0a8afa service nova] Acquiring lock "refresh_cache-40fd1fff-1df0-43b6-9cce-a666ecd63199" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 521.203725] env[62585]: DEBUG oslo_concurrency.lockutils [req-82b46655-9ea2-41a3-8d4b-1bfff1a70cbe req-25b8778e-be08-453f-8659-b9c0bf0a8afa service nova] Acquired lock "refresh_cache-40fd1fff-1df0-43b6-9cce-a666ecd63199" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 521.203725] env[62585]: DEBUG nova.network.neutron [req-82b46655-9ea2-41a3-8d4b-1bfff1a70cbe req-25b8778e-be08-453f-8659-b9c0bf0a8afa service nova] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Refreshing network info cache for port 7f49104d-4e7d-451d-b8e9-a605cabd3b1a {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 521.207483] env[62585]: DEBUG oslo_concurrency.lockutils [req-6f960f39-9bb8-48cc-9e45-23c65847bfed req-96908dce-ed68-4071-9c4b-9587f11ad230 service nova] Releasing lock "refresh_cache-1a7356c7-1442-4de3-8a1f-04fc1bfb03b8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 521.244195] env[62585]: ERROR nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7f49104d-4e7d-451d-b8e9-a605cabd3b1a, please check neutron logs for more information. 
[ 521.244195] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 521.244195] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 521.244195] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 521.244195] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 521.244195] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 521.244195] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 521.244195] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 521.244195] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.244195] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 521.244195] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.244195] env[62585]: ERROR nova.compute.manager raise self.value [ 521.244195] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 521.244195] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 521.244195] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.244195] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 521.244981] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.244981] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 521.244981] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7f49104d-4e7d-451d-b8e9-a605cabd3b1a, please check neutron logs for more information. 
[ 521.244981] env[62585]: ERROR nova.compute.manager [ 521.244981] env[62585]: Traceback (most recent call last): [ 521.244981] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 521.244981] env[62585]: listener.cb(fileno) [ 521.244981] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 521.244981] env[62585]: result = function(*args, **kwargs) [ 521.244981] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 521.244981] env[62585]: return func(*args, **kwargs) [ 521.244981] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 521.244981] env[62585]: raise e [ 521.244981] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 521.244981] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 521.244981] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 521.244981] env[62585]: created_port_ids = self._update_ports_for_instance( [ 521.244981] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 521.244981] env[62585]: with excutils.save_and_reraise_exception(): [ 521.244981] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.244981] env[62585]: self.force_reraise() [ 521.244981] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.244981] env[62585]: raise self.value [ 521.244981] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 521.244981] env[62585]: updated_port = self._update_port( [ 521.244981] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.244981] env[62585]: _ensure_no_port_binding_failure(port) [ 521.244981] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.244981] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 521.246468] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 7f49104d-4e7d-451d-b8e9-a605cabd3b1a, please check neutron logs for more information. [ 521.246468] env[62585]: Removing descriptor: 18 [ 521.246468] env[62585]: ERROR nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7f49104d-4e7d-451d-b8e9-a605cabd3b1a, please check neutron logs for more information. 
[ 521.246468] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Traceback (most recent call last): [ 521.246468] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 521.246468] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] yield resources [ 521.246468] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 521.246468] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] self.driver.spawn(context, instance, image_meta, [ 521.246468] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 521.246468] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] self._vmops.spawn(context, instance, image_meta, injected_files, [ 521.246468] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 521.246468] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] vm_ref = self.build_virtual_machine(instance, [ 521.246786] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 521.246786] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] vif_infos = vmwarevif.get_vif_info(self._session, [ 521.246786] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 521.246786] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] for vif in network_info: [ 521.246786] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 521.246786] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] return self._sync_wrapper(fn, *args, **kwargs) [ 521.246786] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 521.246786] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] self.wait() [ 521.246786] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 521.246786] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] self[:] = self._gt.wait() [ 521.246786] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 521.246786] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] return self._exit_event.wait() [ 521.246786] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 521.247135] env[62585]: ERROR 
nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] result = hub.switch() [ 521.247135] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 521.247135] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] return self.greenlet.switch() [ 521.247135] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 521.247135] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] result = function(*args, **kwargs) [ 521.247135] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 521.247135] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] return func(*args, **kwargs) [ 521.247135] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 521.247135] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] raise e [ 521.247135] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 521.247135] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] nwinfo = self.network_api.allocate_for_instance( [ 521.247135] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 521.247135] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] created_port_ids = self._update_ports_for_instance( [ 521.247641] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 521.247641] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] with excutils.save_and_reraise_exception(): [ 521.247641] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.247641] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] self.force_reraise() [ 521.247641] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.247641] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] raise self.value [ 521.247641] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 521.247641] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] updated_port = self._update_port( [ 521.247641] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.247641] 
env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] _ensure_no_port_binding_failure(port) [ 521.247641] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.247641] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] raise exception.PortBindingFailed(port_id=port['id']) [ 521.247983] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] nova.exception.PortBindingFailed: Binding failed for port 7f49104d-4e7d-451d-b8e9-a605cabd3b1a, please check neutron logs for more information. [ 521.247983] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] [ 521.247983] env[62585]: INFO nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Terminating instance [ 521.250370] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Acquiring lock "refresh_cache-40fd1fff-1df0-43b6-9cce-a666ecd63199" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 521.419775] env[62585]: DEBUG nova.network.neutron [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Successfully created port: 23d648f2-c6c6-4932-91e9-76c55fe82637 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 521.499026] env[62585]: DEBUG oslo_concurrency.lockutils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.589s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 521.499573] env[62585]: DEBUG nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 521.502675] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 6.392s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.502860] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 521.503051] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62585) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 521.503374] env[62585]: DEBUG oslo_concurrency.lockutils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.856s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.506821] env[62585]: INFO nova.compute.claims [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 521.513940] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f412e054-b359-48aa-ba7a-b84d1eea20fc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.534299] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a1d95a-d948-4e20-bd20-d1b8cbedba60 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.552780] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dec0705-d4cf-457b-9f5f-376014b5e4e1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.559154] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384577, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 521.566054] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6fbd329-1af4-4d31-8fc5-02ba57c27973 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.604421] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181314MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=62585) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 521.604626] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.743183] env[62585]: DEBUG nova.network.neutron [req-82b46655-9ea2-41a3-8d4b-1bfff1a70cbe req-25b8778e-be08-453f-8659-b9c0bf0a8afa service nova] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 521.870594] env[62585]: DEBUG nova.network.neutron [req-82b46655-9ea2-41a3-8d4b-1bfff1a70cbe req-25b8778e-be08-453f-8659-b9c0bf0a8afa service nova] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 522.014968] env[62585]: DEBUG nova.compute.utils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 522.017830] env[62585]: DEBUG nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 522.017830] env[62585]: DEBUG nova.network.neutron [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 522.035354] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384577, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.711573} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 522.035813] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Copied Virtual Disk [datastore1] vmware_temp/ee4b9c00-b55a-488b-b89d-656dc0884347/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk to [datastore1] vmware_temp/ee4b9c00-b55a-488b-b89d-656dc0884347/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 522.035992] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Deleting the datastore file [datastore1] vmware_temp/ee4b9c00-b55a-488b-b89d-656dc0884347/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 522.039606] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d7d4dbc-df5c-4ab9-9236-2c172a699c4e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.050894] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for the task: (returnval){ [ 522.050894] env[62585]: value = "task-1384578" [ 522.050894] env[62585]: _type = "Task" [ 522.050894] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 522.059898] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384578, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 522.165875] env[62585]: DEBUG nova.policy [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1a9c52abe52a412e9a0cc9fddc22d24f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22bdec01b1ad431684cc410d5fdde588', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 522.373765] env[62585]: DEBUG oslo_concurrency.lockutils [req-82b46655-9ea2-41a3-8d4b-1bfff1a70cbe req-25b8778e-be08-453f-8659-b9c0bf0a8afa service nova] Releasing lock "refresh_cache-40fd1fff-1df0-43b6-9cce-a666ecd63199" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 522.374285] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Acquired lock "refresh_cache-40fd1fff-1df0-43b6-9cce-a666ecd63199" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 522.374487] env[62585]: DEBUG nova.network.neutron [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 522.522252] env[62585]: DEBUG nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 522.563834] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384578, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024388} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 522.565341] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 522.565616] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Moving file from [datastore1] vmware_temp/ee4b9c00-b55a-488b-b89d-656dc0884347/790c072e-fdf9-43ec-b7a5-3b21a2eaee40 to [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40. 
{{(pid=62585) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 522.566677] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-b7fcdc94-1c58-4d18-8874-d30db1c2c2cf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.575062] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for the task: (returnval){ [ 522.575062] env[62585]: value = "task-1384579" [ 522.575062] env[62585]: _type = "Task" [ 522.575062] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 522.586430] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384579, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 522.785235] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a05f7d-64c3-4b06-9c50-cd38df9792d5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.795039] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4a1c94-11e7-473f-bd91-87c7d6d6c1ea {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.829792] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e4d57d-6415-4757-8071-3b00ffc1b185 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.837991] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746ab766-736e-4c89-aa37-d2ba699d8049 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.852433] env[62585]: DEBUG nova.compute.provider_tree [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Updating inventory in ProviderTree for provider 66db9ec1-b5c3-45d2-a885-8e338110656b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 522.906722] env[62585]: DEBUG nova.network.neutron [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 522.909868] env[62585]: DEBUG nova.network.neutron [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Successfully created port: 934ebd8c-620e-41c6-8472-d02137397e04 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 522.949786] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Acquiring lock "01432003-5c48-40e1-b22b-a538a7e34663" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 522.950253] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Lock "01432003-5c48-40e1-b22b-a538a7e34663" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.091057] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384579, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.042498} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 523.091057] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] File moved {{(pid=62585) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 523.091674] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Cleaning up location [datastore1] vmware_temp/ee4b9c00-b55a-488b-b89d-656dc0884347 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 523.091842] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Deleting the datastore file [datastore1] vmware_temp/ee4b9c00-b55a-488b-b89d-656dc0884347 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 523.092887] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97fff218-8fac-4df4-9038-6a5f67311973 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.102026] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for the task: (returnval){ [ 523.102026] env[62585]: value = "task-1384580" [ 523.102026] 
env[62585]: _type = "Task" [ 523.102026] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 523.112576] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384580, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 523.210718] env[62585]: DEBUG nova.network.neutron [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 523.385365] env[62585]: ERROR nova.scheduler.client.report [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [req-821a3ac0-a1f4-4746-9fdc-7513454ce6b0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 66db9ec1-b5c3-45d2-a885-8e338110656b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-821a3ac0-a1f4-4746-9fdc-7513454ce6b0"}]} [ 523.411382] env[62585]: DEBUG nova.scheduler.client.report [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Refreshing inventories for resource provider 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 523.430369] env[62585]: DEBUG nova.scheduler.client.report [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Updating ProviderTree inventory for provider 66db9ec1-b5c3-45d2-a885-8e338110656b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 523.430546] env[62585]: DEBUG nova.compute.provider_tree [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Updating inventory in ProviderTree for provider 66db9ec1-b5c3-45d2-a885-8e338110656b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 523.444639] env[62585]: DEBUG nova.scheduler.client.report [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Refreshing aggregate associations for resource provider 66db9ec1-b5c3-45d2-a885-8e338110656b, aggregates: None {{(pid=62585) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 523.480653] env[62585]: DEBUG nova.scheduler.client.report [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Refreshing trait associations for resource provider 66db9ec1-b5c3-45d2-a885-8e338110656b, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62585) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 523.533772] env[62585]: DEBUG nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 523.568537] env[62585]: DEBUG nova.virt.hardware [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 523.568837] env[62585]: DEBUG nova.virt.hardware [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 523.569026] env[62585]: DEBUG nova.virt.hardware [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 523.569993] env[62585]: DEBUG nova.virt.hardware [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 523.569993] env[62585]: DEBUG nova.virt.hardware 
[None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 523.569993] env[62585]: DEBUG nova.virt.hardware [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 523.569993] env[62585]: DEBUG nova.virt.hardware [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 523.569993] env[62585]: DEBUG nova.virt.hardware [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 523.570239] env[62585]: DEBUG nova.virt.hardware [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 523.571213] env[62585]: DEBUG nova.virt.hardware [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 523.571435] env[62585]: DEBUG nova.virt.hardware [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 523.573182] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd5ad80-3edc-403c-ae84-f4712e32bfba {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.589462] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc054a2f-ddfc-461a-b20f-c3fb1c14d0d0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.630622] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384580, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025806} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 523.630875] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 523.631878] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a14a6437-85f2-4e0b-9c17-27277ee42296 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.637174] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for the task: (returnval){ [ 523.637174] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f0468a-b69f-da98-f11f-4be36eed8b76" [ 523.637174] env[62585]: _type = "Task" [ 523.637174] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 523.648930] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f0468a-b69f-da98-f11f-4be36eed8b76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 523.715443] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Releasing lock "refresh_cache-40fd1fff-1df0-43b6-9cce-a666ecd63199" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 523.715962] env[62585]: DEBUG nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 523.716204] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 523.720391] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81c7a8b4-5032-4f66-b2c0-7b956b2c8102 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.734789] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73edb6de-78e7-4821-bb90-cdd4d08730c9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.762603] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 40fd1fff-1df0-43b6-9cce-a666ecd63199 could not be found. [ 523.762887] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 523.763083] env[62585]: INFO nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Took 0.05 seconds to destroy the instance on the hypervisor. [ 523.763392] env[62585]: DEBUG oslo.service.loopingcall [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 523.764069] env[62585]: DEBUG nova.compute.manager [-] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 523.764069] env[62585]: DEBUG nova.network.neutron [-] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 523.809035] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00aada2d-aa27-4e13-885e-df9683180327 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.821030] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae14a85-b7e3-49ab-b54a-f08bb7d686f0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.858498] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ecdf98-e54c-47db-9e79-5c0f39a57565 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.866695] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2502bf8-4156-471c-b7a2-e077bdf20ced {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.874064] env[62585]: DEBUG nova.network.neutron [-] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 523.883779] env[62585]: DEBUG nova.compute.provider_tree [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Updating inventory in ProviderTree for provider 66db9ec1-b5c3-45d2-a885-8e338110656b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 524.156026] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f0468a-b69f-da98-f11f-4be36eed8b76, 'name': SearchDatastore_Task, 'duration_secs': 0.008529} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 524.156526] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 524.156526] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 779efd7e-99d5-4065-8ade-1665533677a4/779efd7e-99d5-4065-8ade-1665533677a4.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 524.156749] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77671cbd-9c31-4388-b32c-6ca57f03891d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.166203] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for the task: (returnval){ [ 524.166203] env[62585]: value = "task-1384581" [ 524.166203] env[62585]: _type = "Task" [ 524.166203] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 524.175515] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384581, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 524.386670] env[62585]: DEBUG nova.network.neutron [-] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 524.471659] env[62585]: DEBUG nova.scheduler.client.report [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Updated inventory for provider 66db9ec1-b5c3-45d2-a885-8e338110656b with generation 18 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 524.473317] env[62585]: DEBUG nova.compute.provider_tree [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Updating resource provider 66db9ec1-b5c3-45d2-a885-8e338110656b generation from 18 to 19 during operation: update_inventory {{(pid=62585) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 524.473317] env[62585]: DEBUG nova.compute.provider_tree [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Updating inventory in ProviderTree for provider 66db9ec1-b5c3-45d2-a885-8e338110656b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 524.482214] env[62585]: DEBUG nova.compute.manager [req-22519532-c53d-4c25-b46e-00bfe011f0f2 req-b5ef9cf6-24b9-4615-b4aa-9ee341281d56 service nova] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Received event network-vif-deleted-7f49104d-4e7d-451d-b8e9-a605cabd3b1a {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 524.681937] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384581, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 524.890273] env[62585]: INFO nova.compute.manager [-] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Took 1.12 seconds to deallocate network for instance. 
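The PortBindingFailed teardown traced above is raised by Nova's _ensure_no_port_binding_failure (nova/network/neutron.py), which rejects a port once Neutron reports its binding:vif_type as binding_failed; the instance is then terminated and its network deallocated, exactly as the entries for instance 40fd1fff-1df0-43b6-9cce-a666ecd63199 show. The following is only a minimal diagnostic sketch for inspecting such a port out of band, not part of this log: it assumes openstacksdk is installed and that a clouds.yaml entry named "devstack" with suitable credentials exists (both are assumptions, not taken from the log).

# Hedged sketch: check the binding state of the port that failed above.
# Assumptions (not from the log): openstacksdk is available and clouds.yaml
# defines a cloud named "devstack" that can read Neutron port details.
import openstack

conn = openstack.connect(cloud="devstack")
# Port ID taken from the PortBindingFailed entries earlier in this log.
port = conn.network.get_port("7f49104d-4e7d-451d-b8e9-a605cabd3b1a")
print(port.binding_vif_type)  # Neutron reports "binding_failed" when no mechanism driver could bind
print(port.binding_host_id)   # compute host the binding was attempted on

If binding_vif_type comes back as binding_failed, the neutron-server and agent logs for that host are the next place to look, which matches the "please check neutron logs for more information" hint in the exception message.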
[ 524.893059] env[62585]: DEBUG nova.compute.claims [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 524.893324] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.982446] env[62585]: DEBUG oslo_concurrency.lockutils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.479s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 524.983649] env[62585]: DEBUG nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 524.987681] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 9.335s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.109098] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Acquiring lock "84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.109562] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Lock "84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.182339] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384581, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.668092} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 525.182339] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 779efd7e-99d5-4065-8ade-1665533677a4/779efd7e-99d5-4065-8ade-1665533677a4.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 525.182339] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 525.182339] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-446e5f0e-d6c8-40d9-9ed0-d7023d403bfe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.190348] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for the task: (returnval){ [ 525.190348] env[62585]: value = "task-1384582" [ 525.190348] env[62585]: _type = "Task" [ 525.190348] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 525.198864] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384582, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 525.200364] env[62585]: ERROR nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 23d648f2-c6c6-4932-91e9-76c55fe82637, please check neutron logs for more information. 
[ 525.200364] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 525.200364] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 525.200364] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 525.200364] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 525.200364] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 525.200364] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 525.200364] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 525.200364] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 525.200364] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 525.200364] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 525.200364] env[62585]: ERROR nova.compute.manager raise self.value [ 525.200364] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 525.200364] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 525.200364] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 525.200364] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 525.200860] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 525.200860] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 525.200860] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 23d648f2-c6c6-4932-91e9-76c55fe82637, please check neutron logs for more information. 
[ 525.200860] env[62585]: ERROR nova.compute.manager [ 525.200860] env[62585]: Traceback (most recent call last): [ 525.200860] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 525.200860] env[62585]: listener.cb(fileno) [ 525.200860] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 525.200860] env[62585]: result = function(*args, **kwargs) [ 525.200860] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 525.200860] env[62585]: return func(*args, **kwargs) [ 525.200860] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 525.200860] env[62585]: raise e [ 525.200860] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 525.200860] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 525.200860] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 525.200860] env[62585]: created_port_ids = self._update_ports_for_instance( [ 525.200860] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 525.200860] env[62585]: with excutils.save_and_reraise_exception(): [ 525.200860] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 525.200860] env[62585]: self.force_reraise() [ 525.200860] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 525.200860] env[62585]: raise self.value [ 525.200860] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 525.200860] env[62585]: updated_port = self._update_port( [ 525.200860] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 525.200860] env[62585]: _ensure_no_port_binding_failure(port) [ 525.200860] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 525.200860] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 525.201894] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 23d648f2-c6c6-4932-91e9-76c55fe82637, please check neutron logs for more information. [ 525.201894] env[62585]: Removing descriptor: 15 [ 525.201894] env[62585]: ERROR nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 23d648f2-c6c6-4932-91e9-76c55fe82637, please check neutron logs for more information. 
[ 525.201894] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Traceback (most recent call last): [ 525.201894] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 525.201894] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] yield resources [ 525.201894] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 525.201894] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] self.driver.spawn(context, instance, image_meta, [ 525.201894] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 525.201894] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 525.201894] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 525.201894] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] vm_ref = self.build_virtual_machine(instance, [ 525.202289] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 525.202289] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] vif_infos = vmwarevif.get_vif_info(self._session, [ 525.202289] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 525.202289] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] for vif in network_info: [ 525.202289] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 525.202289] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] return self._sync_wrapper(fn, *args, **kwargs) [ 525.202289] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 525.202289] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] self.wait() [ 525.202289] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 525.202289] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] self[:] = self._gt.wait() [ 525.202289] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 525.202289] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] return self._exit_event.wait() [ 525.202289] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 525.202687] env[62585]: ERROR 
nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] result = hub.switch() [ 525.202687] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 525.202687] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] return self.greenlet.switch() [ 525.202687] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 525.202687] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] result = function(*args, **kwargs) [ 525.202687] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 525.202687] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] return func(*args, **kwargs) [ 525.202687] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 525.202687] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] raise e [ 525.202687] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 525.202687] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] nwinfo = self.network_api.allocate_for_instance( [ 525.202687] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 525.202687] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] created_port_ids = self._update_ports_for_instance( [ 525.203062] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 525.203062] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] with excutils.save_and_reraise_exception(): [ 525.203062] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 525.203062] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] self.force_reraise() [ 525.203062] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 525.203062] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] raise self.value [ 525.203062] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 525.203062] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] updated_port = self._update_port( [ 525.203062] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 525.203062] 
env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] _ensure_no_port_binding_failure(port) [ 525.203062] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 525.203062] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] raise exception.PortBindingFailed(port_id=port['id']) [ 525.203426] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] nova.exception.PortBindingFailed: Binding failed for port 23d648f2-c6c6-4932-91e9-76c55fe82637, please check neutron logs for more information. [ 525.203426] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] [ 525.203426] env[62585]: INFO nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Terminating instance [ 525.204029] env[62585]: DEBUG oslo_concurrency.lockutils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Acquiring lock "refresh_cache-916af5db-2e20-4156-9048-148f0f6253cd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 525.204172] env[62585]: DEBUG oslo_concurrency.lockutils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Acquired lock "refresh_cache-916af5db-2e20-4156-9048-148f0f6253cd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 525.204379] env[62585]: DEBUG nova.network.neutron [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 525.499884] env[62585]: DEBUG nova.compute.utils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 525.499884] env[62585]: DEBUG nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 525.499884] env[62585]: DEBUG nova.network.neutron [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 525.623895] env[62585]: DEBUG nova.policy [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e06e7768758e4fcb998008cea403fcc8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7ca32e48d504144bbcbfc4cc0301bdf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 525.701787] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384582, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066763} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 525.702259] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 525.703371] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782b1eb0-99a2-4dc4-b1ce-39f75dffc28d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.736486] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 779efd7e-99d5-4065-8ade-1665533677a4/779efd7e-99d5-4065-8ade-1665533677a4.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 525.741600] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-487e832f-7982-433c-9c94-6403644ad5aa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.759786] env[62585]: DEBUG nova.network.neutron [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 525.768115] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for the task: (returnval){ [ 525.768115] env[62585]: value = "task-1384583" [ 525.768115] env[62585]: _type = "Task" [ 525.768115] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 525.778165] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384583, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 525.788561] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7a7a13-77a1-4572-b9b3-5f36abfb8637 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.796342] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52defb7-f056-4399-8ed3-c07bd1b13f24 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.826604] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c97c790-357f-47da-9344-469c2379018d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.834079] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-300aa615-f54e-4db9-943d-6870a3f0c4f1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.847840] env[62585]: DEBUG nova.compute.provider_tree [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 526.005356] env[62585]: DEBUG nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 526.055466] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Acquiring lock "971d6e19-044b-4af8-b6c3-12b617cc24fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.055708] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Lock "971d6e19-044b-4af8-b6c3-12b617cc24fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.277886] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384583, 'name': ReconfigVM_Task, 'duration_secs': 0.27901} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 526.278465] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 779efd7e-99d5-4065-8ade-1665533677a4/779efd7e-99d5-4065-8ade-1665533677a4.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 526.279113] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72776725-ea53-4556-8122-ac07a84fbeb2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.288038] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for the task: (returnval){ [ 526.288038] env[62585]: value = "task-1384584" [ 526.288038] env[62585]: _type = "Task" [ 526.288038] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 526.303735] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384584, 'name': Rename_Task} progress is 6%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 526.316374] env[62585]: DEBUG nova.compute.manager [req-f642b8e9-ccd2-4332-96ab-95fadbc3dd87 req-1ac4bb08-adaf-4740-b9bb-4e69b797674c service nova] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Received event network-changed-23d648f2-c6c6-4932-91e9-76c55fe82637 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 526.316568] env[62585]: DEBUG nova.compute.manager [req-f642b8e9-ccd2-4332-96ab-95fadbc3dd87 req-1ac4bb08-adaf-4740-b9bb-4e69b797674c service nova] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Refreshing instance network info cache due to event network-changed-23d648f2-c6c6-4932-91e9-76c55fe82637. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 526.316792] env[62585]: DEBUG oslo_concurrency.lockutils [req-f642b8e9-ccd2-4332-96ab-95fadbc3dd87 req-1ac4bb08-adaf-4740-b9bb-4e69b797674c service nova] Acquiring lock "refresh_cache-916af5db-2e20-4156-9048-148f0f6253cd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 526.328074] env[62585]: DEBUG nova.network.neutron [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 526.353769] env[62585]: DEBUG nova.scheduler.client.report [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 526.440487] env[62585]: DEBUG nova.network.neutron [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Successfully created port: f0854270-4feb-4756-a645-54b6d6320e21 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 526.532155] env[62585]: ERROR nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 934ebd8c-620e-41c6-8472-d02137397e04, please check neutron logs for more information. 
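The scheduler report above echoes the provider's inventory verbatim: totals, reserved amounts, unit bounds and allocation ratios per resource class. As a worked illustration (a minimal sketch using the usual Placement convention, not Nova's own code), the effective capacity the scheduler can consume from each class is (total - reserved) * allocation_ratio:

```python
# Minimal sketch: compute effective capacity from the inventory dict logged
# above for provider 66db9ec1-b5c3-45d2-a885-8e338110656b. This follows the
# usual Placement convention capacity = (total - reserved) * allocation_ratio;
# it is an illustration, not Nova's or Placement's own code.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity = ({inv['total']} - {inv['reserved']}) "
          f"* {inv['allocation_ratio']} = {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```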
[ 526.532155] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 526.532155] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 526.532155] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 526.532155] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 526.532155] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 526.532155] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 526.532155] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 526.532155] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.532155] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 526.532155] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.532155] env[62585]: ERROR nova.compute.manager raise self.value [ 526.532155] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 526.532155] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 526.532155] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.532155] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 526.532625] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 526.532625] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 526.532625] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 934ebd8c-620e-41c6-8472-d02137397e04, please check neutron logs for more information. 
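The innermost frames of this traceback show the two pieces that turn Neutron's response into the PortBindingFailed above: the updated port's binding state is checked and a failed binding is raised as a typed exception, while oslo.utils' save_and_reraise_exception() lets cleanup run without losing the original error. The sketch below reproduces that pattern with standalone stand-ins (the exception class and helper functions are illustrative, not Nova's actual implementations):

```python
# Hedged sketch of the pattern visible in the traceback above: not Nova's
# code, but standalone stand-ins showing how a "binding_failed" vif_type
# becomes a typed exception and how save_and_reraise_exception() preserves it.
from oslo_utils import excutils


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port):
    # Neutron reports a failed binding via the port's binding:vif_type field.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


def update_port(port):
    try:
        ensure_no_port_binding_failure(port)
    except Exception:
        # Run cleanup while keeping the original exception and traceback;
        # it is re-raised automatically when the context manager exits.
        with excutils.save_and_reraise_exception():
            print(f"cleaning up port {port['id']} before re-raising")


try:
    update_port({'id': '23d648f2-c6c6-4932-91e9-76c55fe82637',
                 'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)
```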
[ 526.532625] env[62585]: ERROR nova.compute.manager [ 526.532625] env[62585]: Traceback (most recent call last): [ 526.532625] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 526.532625] env[62585]: listener.cb(fileno) [ 526.532625] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 526.532625] env[62585]: result = function(*args, **kwargs) [ 526.532625] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 526.532625] env[62585]: return func(*args, **kwargs) [ 526.532625] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 526.532625] env[62585]: raise e [ 526.532625] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 526.532625] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 526.532625] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 526.532625] env[62585]: created_port_ids = self._update_ports_for_instance( [ 526.532625] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 526.532625] env[62585]: with excutils.save_and_reraise_exception(): [ 526.532625] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.532625] env[62585]: self.force_reraise() [ 526.532625] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.532625] env[62585]: raise self.value [ 526.532625] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 526.532625] env[62585]: updated_port = self._update_port( [ 526.532625] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.532625] env[62585]: _ensure_no_port_binding_failure(port) [ 526.532625] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 526.532625] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 526.533361] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 934ebd8c-620e-41c6-8472-d02137397e04, please check neutron logs for more information. [ 526.533361] env[62585]: Removing descriptor: 18 [ 526.533361] env[62585]: ERROR nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 934ebd8c-620e-41c6-8472-d02137397e04, please check neutron logs for more information. 
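This second, unformatted traceback is the eventlet side of the same failure: network allocation runs in a background greenthread, and the exception it raises is stored until the deferred network_info is iterated and wait() is called on the greenthread (the _sync_wrapper / self._gt.wait() frames in the per-instance tracebacks). A minimal eventlet sketch of that propagation, with hypothetical stand-in names rather than Nova's code:

```python
# Minimal sketch (not Nova code): an exception raised inside an eventlet
# greenthread is captured and only surfaces when the result is waited on,
# which is why the PortBindingFailed above appears under GreenThread.wait().
import eventlet


class PortBindingFailed(Exception):
    pass


def allocate_network_async(port_id):
    # Stand-in for the background network allocation; it fails immediately.
    raise PortBindingFailed(f"Binding failed for port {port_id}")


# Spawn the work in a greenthread; nothing is raised at this point.
gt = eventlet.spawn(allocate_network_async,
                    '934ebd8c-620e-41c6-8472-d02137397e04')

try:
    gt.wait()          # the stored exception is re-raised here
except PortBindingFailed as exc:
    print(f"surfaced at wait(): {exc}")
```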
[ 526.533361] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Traceback (most recent call last): [ 526.533361] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 526.533361] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] yield resources [ 526.533361] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 526.533361] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] self.driver.spawn(context, instance, image_meta, [ 526.533361] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 526.533361] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] self._vmops.spawn(context, instance, image_meta, injected_files, [ 526.533361] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 526.533361] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] vm_ref = self.build_virtual_machine(instance, [ 526.533692] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 526.533692] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] vif_infos = vmwarevif.get_vif_info(self._session, [ 526.533692] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 526.533692] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] for vif in network_info: [ 526.533692] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 526.533692] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] return self._sync_wrapper(fn, *args, **kwargs) [ 526.533692] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 526.533692] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] self.wait() [ 526.533692] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 526.533692] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] self[:] = self._gt.wait() [ 526.533692] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 526.533692] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] return self._exit_event.wait() [ 526.533692] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 526.534012] env[62585]: ERROR 
nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] result = hub.switch() [ 526.534012] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 526.534012] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] return self.greenlet.switch() [ 526.534012] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 526.534012] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] result = function(*args, **kwargs) [ 526.534012] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 526.534012] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] return func(*args, **kwargs) [ 526.534012] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 526.534012] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] raise e [ 526.534012] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 526.534012] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] nwinfo = self.network_api.allocate_for_instance( [ 526.534012] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 526.534012] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] created_port_ids = self._update_ports_for_instance( [ 526.534405] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 526.534405] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] with excutils.save_and_reraise_exception(): [ 526.534405] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.534405] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] self.force_reraise() [ 526.534405] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.534405] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] raise self.value [ 526.534405] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 526.534405] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] updated_port = self._update_port( [ 526.534405] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.534405] 
env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] _ensure_no_port_binding_failure(port) [ 526.534405] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 526.534405] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] raise exception.PortBindingFailed(port_id=port['id']) [ 526.534732] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] nova.exception.PortBindingFailed: Binding failed for port 934ebd8c-620e-41c6-8472-d02137397e04, please check neutron logs for more information. [ 526.534732] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] [ 526.534732] env[62585]: INFO nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Terminating instance [ 526.534816] env[62585]: DEBUG oslo_concurrency.lockutils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Acquiring lock "refresh_cache-f8ac8468-a804-4d0f-a0e8-864eb7064074" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 526.536342] env[62585]: DEBUG oslo_concurrency.lockutils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Acquired lock "refresh_cache-f8ac8468-a804-4d0f-a0e8-864eb7064074" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 526.536342] env[62585]: DEBUG nova.network.neutron [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 526.801893] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384584, 'name': Rename_Task, 'duration_secs': 0.127059} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 526.801893] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 526.801893] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6b0893e-6bc1-4ed3-9603-8804e5090de9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.807831] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for the task: (returnval){ [ 526.807831] env[62585]: value = "task-1384585" [ 526.807831] env[62585]: _type = "Task" [ 526.807831] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 526.815438] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384585, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 526.833445] env[62585]: DEBUG oslo_concurrency.lockutils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Releasing lock "refresh_cache-916af5db-2e20-4156-9048-148f0f6253cd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 526.834293] env[62585]: DEBUG nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 526.834293] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 526.835867] env[62585]: DEBUG oslo_concurrency.lockutils [req-f642b8e9-ccd2-4332-96ab-95fadbc3dd87 req-1ac4bb08-adaf-4740-b9bb-4e69b797674c service nova] Acquired lock "refresh_cache-916af5db-2e20-4156-9048-148f0f6253cd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 526.835867] env[62585]: DEBUG nova.network.neutron [req-f642b8e9-ccd2-4332-96ab-95fadbc3dd87 req-1ac4bb08-adaf-4740-b9bb-4e69b797674c service nova] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Refreshing network info cache for port 23d648f2-c6c6-4932-91e9-76c55fe82637 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 526.837039] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e3fede19-43c5-4187-8abc-92a3f99502b7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.847479] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7634f0ab-be9e-4750-b130-131daf9c1211 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.861622] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.874s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 526.864284] env[62585]: ERROR nova.compute.manager [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port dfee4c92-ac3f-4c83-b53e-451425f039b1, please check neutron logs for more information. 
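The Acquiring / Acquired / Releasing lock records above (for example "refresh_cache-<instance uuid>" and "compute_resources", with their waited/held timings) come from oslo.concurrency's lockutils, which serialises these sections on named in-process locks. A small hedged sketch of the same primitive; the lock names and worker functions below are illustrative, not Nova's:

```python
# Hedged sketch: serialising work on named locks with oslo.concurrency,
# the mechanism behind the Acquiring/Acquired/Releasing lock records above.
# The lock names and the worker functions are illustrative only.
from oslo_concurrency import lockutils


@lockutils.synchronized('refresh_cache-916af5db-2e20-4156-9048-148f0f6253cd')
def refresh_network_cache():
    # Only one caller at a time can rebuild this instance's network cache.
    print("rebuilding instance network info cache")


def abort_claim():
    # Equivalent context-manager form, as used around "compute_resources".
    with lockutils.lock('compute_resources'):
        print("rolling back resource claim")


refresh_network_cache()
abort_claim()
```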
[ 526.864284] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Traceback (most recent call last): [ 526.864284] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 526.864284] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] self.driver.spawn(context, instance, image_meta, [ 526.864284] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 526.864284] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] self._vmops.spawn(context, instance, image_meta, injected_files, [ 526.864284] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 526.864284] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] vm_ref = self.build_virtual_machine(instance, [ 526.864284] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 526.864284] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] vif_infos = vmwarevif.get_vif_info(self._session, [ 526.864284] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 526.864835] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] for vif in network_info: [ 526.864835] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 526.864835] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] return self._sync_wrapper(fn, *args, **kwargs) [ 526.864835] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 526.864835] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] self.wait() [ 526.864835] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 526.864835] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] self[:] = self._gt.wait() [ 526.864835] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 526.864835] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] return self._exit_event.wait() [ 526.864835] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 526.864835] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] result = hub.switch() [ 526.864835] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
526.864835] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] return self.greenlet.switch() [ 526.865679] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 526.865679] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] result = function(*args, **kwargs) [ 526.865679] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 526.865679] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] return func(*args, **kwargs) [ 526.865679] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 526.865679] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] raise e [ 526.865679] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 526.865679] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] nwinfo = self.network_api.allocate_for_instance( [ 526.865679] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 526.865679] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] created_port_ids = self._update_ports_for_instance( [ 526.865679] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 526.865679] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] with excutils.save_and_reraise_exception(): [ 526.865679] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.866954] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] self.force_reraise() [ 526.866954] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.866954] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] raise self.value [ 526.866954] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 526.866954] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] updated_port = self._update_port( [ 526.866954] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.866954] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] _ensure_no_port_binding_failure(port) [ 526.866954] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 526.866954] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] raise exception.PortBindingFailed(port_id=port['id']) [ 526.866954] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] nova.exception.PortBindingFailed: Binding failed for port dfee4c92-ac3f-4c83-b53e-451425f039b1, please check neutron logs for more information. [ 526.866954] env[62585]: ERROR nova.compute.manager [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] [ 526.869165] env[62585]: DEBUG nova.compute.utils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Binding failed for port dfee4c92-ac3f-4c83-b53e-451425f039b1, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 526.869653] env[62585]: DEBUG oslo_concurrency.lockutils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.206s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.871853] env[62585]: INFO nova.compute.claims [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 526.876724] env[62585]: DEBUG nova.compute.manager [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Build of instance d8d432cc-07e0-4aac-9ad0-88a621173835 was re-scheduled: Binding failed for port dfee4c92-ac3f-4c83-b53e-451425f039b1, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 526.877221] env[62585]: DEBUG nova.compute.manager [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 526.877460] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Acquiring lock "refresh_cache-d8d432cc-07e0-4aac-9ad0-88a621173835" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 526.877612] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Acquired lock "refresh_cache-d8d432cc-07e0-4aac-9ad0-88a621173835" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 526.877774] env[62585]: DEBUG nova.network.neutron [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 526.891548] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 916af5db-2e20-4156-9048-148f0f6253cd could not be found. [ 526.891548] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 526.891548] env[62585]: INFO nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Took 0.06 seconds to destroy the instance on the hypervisor. [ 526.891749] env[62585]: DEBUG oslo.service.loopingcall [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 526.892174] env[62585]: DEBUG nova.compute.manager [-] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 526.892436] env[62585]: DEBUG nova.network.neutron [-] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 526.934180] env[62585]: DEBUG nova.network.neutron [-] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 527.020653] env[62585]: DEBUG nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 527.057300] env[62585]: DEBUG nova.virt.hardware [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 527.057593] env[62585]: DEBUG nova.virt.hardware [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 527.057772] env[62585]: DEBUG nova.virt.hardware [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 527.057977] env[62585]: DEBUG nova.virt.hardware [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 527.058607] env[62585]: DEBUG nova.virt.hardware [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 527.058846] env[62585]: DEBUG nova.virt.hardware [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 
tempest-ServerTagsTestJSON-746744466-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 527.059355] env[62585]: DEBUG nova.virt.hardware [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 527.059599] env[62585]: DEBUG nova.virt.hardware [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 527.059925] env[62585]: DEBUG nova.virt.hardware [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 527.060178] env[62585]: DEBUG nova.virt.hardware [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 527.060503] env[62585]: DEBUG nova.virt.hardware [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 527.061817] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5467c440-037d-48ac-81b4-f10d5c51ba6e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.076433] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9211176a-129a-43c5-8359-4bc535c3b0b9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.096467] env[62585]: DEBUG nova.network.neutron [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 527.320717] env[62585]: DEBUG oslo_vmware.api [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384585, 'name': PowerOnVM_Task, 'duration_secs': 0.447999} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 527.321486] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 527.321823] env[62585]: INFO nova.compute.manager [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Took 8.72 seconds to spawn the instance on the hypervisor. [ 527.322225] env[62585]: DEBUG nova.compute.manager [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 527.323299] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d26f7b5-7b6b-4c23-876d-824998dd79d8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.385264] env[62585]: DEBUG nova.network.neutron [req-f642b8e9-ccd2-4332-96ab-95fadbc3dd87 req-1ac4bb08-adaf-4740-b9bb-4e69b797674c service nova] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 527.397663] env[62585]: DEBUG nova.network.neutron [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 527.435517] env[62585]: DEBUG nova.network.neutron [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 527.440846] env[62585]: DEBUG nova.network.neutron [-] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 527.623894] env[62585]: DEBUG nova.network.neutron [req-f642b8e9-ccd2-4332-96ab-95fadbc3dd87 req-1ac4bb08-adaf-4740-b9bb-4e69b797674c service nova] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 527.651281] env[62585]: DEBUG nova.network.neutron [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 527.736894] env[62585]: DEBUG oslo_concurrency.lockutils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Acquiring lock "68b4ca9d-f934-4b44-8c34-0b1bfb848672" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.736894] env[62585]: DEBUG oslo_concurrency.lockutils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Lock "68b4ca9d-f934-4b44-8c34-0b1bfb848672" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.844013] env[62585]: INFO nova.compute.manager [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Took 19.20 seconds to build instance. [ 527.903800] env[62585]: DEBUG oslo_concurrency.lockutils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Releasing lock "refresh_cache-f8ac8468-a804-4d0f-a0e8-864eb7064074" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 527.903800] env[62585]: DEBUG nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 527.903800] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 527.903800] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bdcc82a0-c0e0-40b9-a80d-dee2c7045e24 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.911289] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c28c66-dc3f-4c19-9b78-8803042d7473 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.928046] env[62585]: DEBUG nova.compute.manager [req-e483018a-07cb-44b2-9deb-6a218d196d82 req-b371b9b5-097a-49d0-81b7-cec131ec82c6 service nova] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Received event network-changed-934ebd8c-620e-41c6-8472-d02137397e04 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 527.928046] env[62585]: DEBUG nova.compute.manager [req-e483018a-07cb-44b2-9deb-6a218d196d82 req-b371b9b5-097a-49d0-81b7-cec131ec82c6 service nova] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Refreshing instance network info cache due to event network-changed-934ebd8c-620e-41c6-8472-d02137397e04. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 527.928046] env[62585]: DEBUG oslo_concurrency.lockutils [req-e483018a-07cb-44b2-9deb-6a218d196d82 req-b371b9b5-097a-49d0-81b7-cec131ec82c6 service nova] Acquiring lock "refresh_cache-f8ac8468-a804-4d0f-a0e8-864eb7064074" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.928835] env[62585]: DEBUG oslo_concurrency.lockutils [req-e483018a-07cb-44b2-9deb-6a218d196d82 req-b371b9b5-097a-49d0-81b7-cec131ec82c6 service nova] Acquired lock "refresh_cache-f8ac8468-a804-4d0f-a0e8-864eb7064074" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.929226] env[62585]: DEBUG nova.network.neutron [req-e483018a-07cb-44b2-9deb-6a218d196d82 req-b371b9b5-097a-49d0-81b7-cec131ec82c6 service nova] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Refreshing network info cache for port 934ebd8c-620e-41c6-8472-d02137397e04 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 527.939512] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f8ac8468-a804-4d0f-a0e8-864eb7064074 could not be found. 
[ 527.939731] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 527.939905] env[62585]: INFO nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Took 0.04 seconds to destroy the instance on the hypervisor. [ 527.940345] env[62585]: DEBUG oslo.service.loopingcall [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 527.940702] env[62585]: INFO nova.compute.manager [-] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Took 1.05 seconds to deallocate network for instance. [ 527.940909] env[62585]: DEBUG nova.compute.manager [-] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 527.941010] env[62585]: DEBUG nova.network.neutron [-] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 527.951726] env[62585]: DEBUG nova.compute.claims [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 527.952010] env[62585]: DEBUG oslo_concurrency.lockutils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.004246] env[62585]: DEBUG nova.network.neutron [-] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 528.125772] env[62585]: DEBUG oslo_concurrency.lockutils [req-f642b8e9-ccd2-4332-96ab-95fadbc3dd87 req-1ac4bb08-adaf-4740-b9bb-4e69b797674c service nova] Releasing lock "refresh_cache-916af5db-2e20-4156-9048-148f0f6253cd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 528.153155] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Releasing lock "refresh_cache-d8d432cc-07e0-4aac-9ad0-88a621173835" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 528.153442] env[62585]: DEBUG nova.compute.manager [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 528.153548] env[62585]: DEBUG nova.compute.manager [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 528.154535] env[62585]: DEBUG nova.network.neutron [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 528.199704] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e64efe9-8df8-4206-b8cc-3ec508f99f62 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.209633] env[62585]: DEBUG nova.network.neutron [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 528.219145] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2614a48e-594c-44d5-b089-528c1222b49e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.256894] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e6f62a-41a1-463b-829d-5e5f049379da {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.264670] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930ae917-28f9-4489-a178-491360ba9dc3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.279788] env[62585]: DEBUG nova.compute.provider_tree [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 528.345951] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd069f8a-e1b4-40b8-a144-cb4bf8f719e4 tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Lock "779efd7e-99d5-4065-8ade-1665533677a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.721s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 528.508172] env[62585]: DEBUG nova.network.neutron [-] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 528.606027] env[62585]: DEBUG nova.network.neutron [req-e483018a-07cb-44b2-9deb-6a218d196d82 req-b371b9b5-097a-49d0-81b7-cec131ec82c6 service nova] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 528.714983] env[62585]: DEBUG nova.network.neutron [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 528.786063] env[62585]: DEBUG nova.scheduler.client.report [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 528.854463] env[62585]: DEBUG nova.compute.manager [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 528.882667] env[62585]: DEBUG nova.network.neutron [req-e483018a-07cb-44b2-9deb-6a218d196d82 req-b371b9b5-097a-49d0-81b7-cec131ec82c6 service nova] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.012740] env[62585]: INFO nova.compute.manager [-] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Took 1.07 seconds to deallocate network for instance. [ 529.013674] env[62585]: DEBUG nova.compute.claims [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 529.014116] env[62585]: DEBUG oslo_concurrency.lockutils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.219041] env[62585]: INFO nova.compute.manager [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] Took 1.06 seconds to deallocate network for instance. 
[ 529.279034] env[62585]: DEBUG nova.compute.manager [req-58f8c9e4-f304-4164-860c-fb97ed0fdc5d req-fbaeaa5b-e94f-4aa7-856b-910bc8efd54c service nova] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Received event network-vif-deleted-23d648f2-c6c6-4932-91e9-76c55fe82637 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 529.295528] env[62585]: DEBUG oslo_concurrency.lockutils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 529.296659] env[62585]: DEBUG nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 529.299656] env[62585]: DEBUG oslo_concurrency.lockutils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.331s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.301165] env[62585]: INFO nova.compute.claims [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 529.380038] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.387766] env[62585]: DEBUG oslo_concurrency.lockutils [req-e483018a-07cb-44b2-9deb-6a218d196d82 req-b371b9b5-097a-49d0-81b7-cec131ec82c6 service nova] Releasing lock "refresh_cache-f8ac8468-a804-4d0f-a0e8-864eb7064074" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.807578] env[62585]: DEBUG nova.compute.utils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 529.814976] env[62585]: DEBUG nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 529.814976] env[62585]: DEBUG nova.network.neutron [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 530.249867] env[62585]: DEBUG nova.policy [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1a9c52abe52a412e9a0cc9fddc22d24f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22bdec01b1ad431684cc410d5fdde588', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 530.263480] env[62585]: INFO nova.scheduler.client.report [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Deleted allocations for instance d8d432cc-07e0-4aac-9ad0-88a621173835 [ 530.313793] env[62585]: DEBUG nova.compute.manager [None req-1beeae52-ad21-4c56-b50c-c60cc148a6b1 tempest-ServerDiagnosticsV248Test-2060620221 tempest-ServerDiagnosticsV248Test-2060620221-project-admin] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 530.314922] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db53f194-4589-4c66-aa67-2128e98604fb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.322747] env[62585]: DEBUG nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 530.332826] env[62585]: INFO nova.compute.manager [None req-1beeae52-ad21-4c56-b50c-c60cc148a6b1 tempest-ServerDiagnosticsV248Test-2060620221 tempest-ServerDiagnosticsV248Test-2060620221-project-admin] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Retrieving diagnostics [ 530.333975] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a052c1-9514-4743-a6b5-f48af9036851 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.578961] env[62585]: ERROR nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f0854270-4feb-4756-a645-54b6d6320e21, please check neutron logs for more information. 
[ 530.578961] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 530.578961] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 530.578961] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 530.578961] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 530.578961] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 530.578961] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 530.578961] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 530.578961] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 530.578961] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 530.578961] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 530.578961] env[62585]: ERROR nova.compute.manager raise self.value [ 530.578961] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 530.578961] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 530.578961] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 530.578961] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 530.579907] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 530.579907] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 530.579907] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f0854270-4feb-4756-a645-54b6d6320e21, please check neutron logs for more information. 
[ 530.579907] env[62585]: ERROR nova.compute.manager [ 530.579907] env[62585]: Traceback (most recent call last): [ 530.579907] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 530.579907] env[62585]: listener.cb(fileno) [ 530.579907] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 530.579907] env[62585]: result = function(*args, **kwargs) [ 530.579907] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 530.579907] env[62585]: return func(*args, **kwargs) [ 530.579907] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 530.579907] env[62585]: raise e [ 530.579907] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 530.579907] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 530.579907] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 530.579907] env[62585]: created_port_ids = self._update_ports_for_instance( [ 530.579907] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 530.579907] env[62585]: with excutils.save_and_reraise_exception(): [ 530.579907] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 530.579907] env[62585]: self.force_reraise() [ 530.579907] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 530.579907] env[62585]: raise self.value [ 530.579907] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 530.579907] env[62585]: updated_port = self._update_port( [ 530.579907] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 530.579907] env[62585]: _ensure_no_port_binding_failure(port) [ 530.579907] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 530.579907] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 530.580668] env[62585]: nova.exception.PortBindingFailed: Binding failed for port f0854270-4feb-4756-a645-54b6d6320e21, please check neutron logs for more information. [ 530.580668] env[62585]: Removing descriptor: 15 [ 530.580668] env[62585]: ERROR nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f0854270-4feb-4756-a645-54b6d6320e21, please check neutron logs for more information. 
[ 530.580668] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Traceback (most recent call last): [ 530.580668] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 530.580668] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] yield resources [ 530.580668] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 530.580668] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] self.driver.spawn(context, instance, image_meta, [ 530.580668] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 530.580668] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 530.580668] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 530.580668] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] vm_ref = self.build_virtual_machine(instance, [ 530.581171] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 530.581171] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 530.581171] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 530.581171] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] for vif in network_info: [ 530.581171] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 530.581171] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] return self._sync_wrapper(fn, *args, **kwargs) [ 530.581171] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 530.581171] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] self.wait() [ 530.581171] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 530.581171] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] self[:] = self._gt.wait() [ 530.581171] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 530.581171] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] return self._exit_event.wait() [ 530.581171] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 530.581802] env[62585]: ERROR 
nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] result = hub.switch() [ 530.581802] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 530.581802] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] return self.greenlet.switch() [ 530.581802] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 530.581802] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] result = function(*args, **kwargs) [ 530.581802] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 530.581802] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] return func(*args, **kwargs) [ 530.581802] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 530.581802] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] raise e [ 530.581802] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 530.581802] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] nwinfo = self.network_api.allocate_for_instance( [ 530.581802] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 530.581802] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] created_port_ids = self._update_ports_for_instance( [ 530.582395] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 530.582395] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] with excutils.save_and_reraise_exception(): [ 530.582395] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 530.582395] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] self.force_reraise() [ 530.582395] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 530.582395] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] raise self.value [ 530.582395] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 530.582395] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] updated_port = self._update_port( [ 530.582395] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 530.582395] 
env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] _ensure_no_port_binding_failure(port) [ 530.582395] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 530.582395] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] raise exception.PortBindingFailed(port_id=port['id']) [ 530.582709] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] nova.exception.PortBindingFailed: Binding failed for port f0854270-4feb-4756-a645-54b6d6320e21, please check neutron logs for more information. [ 530.582709] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] [ 530.582709] env[62585]: INFO nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Terminating instance [ 530.582709] env[62585]: DEBUG oslo_concurrency.lockutils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Acquiring lock "refresh_cache-106dca6d-1ddf-4315-b645-c52c7c59f5d1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 530.582709] env[62585]: DEBUG oslo_concurrency.lockutils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Acquired lock "refresh_cache-106dca6d-1ddf-4315-b645-c52c7c59f5d1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 530.582709] env[62585]: DEBUG nova.network.neutron [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 530.631066] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3c234d-464b-4aa7-a725-1938a7e1a108 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.639946] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a3e9dc-0f11-4db5-9daf-9477ceee04e2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.671596] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799c9c33-93c7-4b79-8315-dff536af85e6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.679970] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4586a5-a23b-4dc7-b23f-fbf76dcd663d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.693825] env[62585]: DEBUG nova.compute.provider_tree [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Inventory has not changed in ProviderTree for provider: 
66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 530.775220] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d56a5dc-78de-4039-896c-3ec779b70293 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Lock "d8d432cc-07e0-4aac-9ad0-88a621173835" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.568s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 530.778921] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "d8d432cc-07e0-4aac-9ad0-88a621173835" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 17.664s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.778921] env[62585]: INFO nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: d8d432cc-07e0-4aac-9ad0-88a621173835] During sync_power_state the instance has a pending task (spawning). Skip. [ 530.778921] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "d8d432cc-07e0-4aac-9ad0-88a621173835" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.071283] env[62585]: DEBUG nova.network.neutron [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Successfully created port: f650ce79-b0d6-4a2c-82dd-cfd77974d2f7 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 531.121487] env[62585]: DEBUG nova.network.neutron [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 531.200115] env[62585]: DEBUG nova.scheduler.client.report [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 531.277452] env[62585]: DEBUG nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 531.333255] env[62585]: DEBUG nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 531.364058] env[62585]: DEBUG nova.virt.hardware [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 531.364302] env[62585]: DEBUG nova.virt.hardware [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 531.364598] env[62585]: DEBUG nova.virt.hardware [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 531.364671] env[62585]: DEBUG nova.virt.hardware [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 531.365247] env[62585]: DEBUG nova.virt.hardware [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 531.365487] env[62585]: DEBUG nova.virt.hardware [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 531.365733] env[62585]: DEBUG nova.virt.hardware [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 531.365911] env[62585]: 
DEBUG nova.virt.hardware [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 531.366111] env[62585]: DEBUG nova.virt.hardware [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 531.366311] env[62585]: DEBUG nova.virt.hardware [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 531.366963] env[62585]: DEBUG nova.virt.hardware [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 531.367776] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9047343-06bf-4d75-a4be-45a660517691 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.380030] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f369e250-c3d1-46eb-baa3-12b44a3e8012 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.393184] env[62585]: DEBUG nova.network.neutron [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.633881] env[62585]: DEBUG nova.compute.manager [req-b4f11833-7474-4640-864f-cdbf79d7866d req-cc2b5426-624d-4f00-9c9c-eaddda8c1ef4 service nova] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Received event network-vif-deleted-934ebd8c-620e-41c6-8472-d02137397e04 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 531.709292] env[62585]: DEBUG oslo_concurrency.lockutils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.709804] env[62585]: DEBUG nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 531.712329] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.178s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.810446] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.901324] env[62585]: DEBUG oslo_concurrency.lockutils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Releasing lock "refresh_cache-106dca6d-1ddf-4315-b645-c52c7c59f5d1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 531.901987] env[62585]: DEBUG nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 531.902087] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 531.902930] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84b4e5d5-fcad-4822-a4fe-a799dcd2e7a8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.912014] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725cfbb1-9ae8-4c42-8932-eae855da628c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.935998] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 106dca6d-1ddf-4315-b645-c52c7c59f5d1 could not be found. 
[ 531.935998] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 531.936358] env[62585]: INFO nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Took 0.03 seconds to destroy the instance on the hypervisor. [ 531.936636] env[62585]: DEBUG oslo.service.loopingcall [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 531.937105] env[62585]: DEBUG nova.compute.manager [-] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 531.937105] env[62585]: DEBUG nova.network.neutron [-] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 531.974237] env[62585]: DEBUG nova.network.neutron [-] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 532.217539] env[62585]: DEBUG nova.compute.utils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 532.230151] env[62585]: DEBUG nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 532.230151] env[62585]: DEBUG nova.network.neutron [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 532.281597] env[62585]: DEBUG nova.compute.manager [req-c4de2090-4a11-4901-bb26-5efea3e79cb0 req-d1f094cf-b627-4954-835f-21eeb7a7357d service nova] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Received event network-changed-f0854270-4feb-4756-a645-54b6d6320e21 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 532.282038] env[62585]: DEBUG nova.compute.manager [req-c4de2090-4a11-4901-bb26-5efea3e79cb0 req-d1f094cf-b627-4954-835f-21eeb7a7357d service nova] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Refreshing instance network info cache due to event network-changed-f0854270-4feb-4756-a645-54b6d6320e21. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 532.282420] env[62585]: DEBUG oslo_concurrency.lockutils [req-c4de2090-4a11-4901-bb26-5efea3e79cb0 req-d1f094cf-b627-4954-835f-21eeb7a7357d service nova] Acquiring lock "refresh_cache-106dca6d-1ddf-4315-b645-c52c7c59f5d1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 532.284769] env[62585]: DEBUG oslo_concurrency.lockutils [req-c4de2090-4a11-4901-bb26-5efea3e79cb0 req-d1f094cf-b627-4954-835f-21eeb7a7357d service nova] Acquired lock "refresh_cache-106dca6d-1ddf-4315-b645-c52c7c59f5d1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 532.284769] env[62585]: DEBUG nova.network.neutron [req-c4de2090-4a11-4901-bb26-5efea3e79cb0 req-d1f094cf-b627-4954-835f-21eeb7a7357d service nova] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Refreshing network info cache for port f0854270-4feb-4756-a645-54b6d6320e21 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 532.410549] env[62585]: DEBUG nova.policy [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd04fce3763e64515913ade9d9058c3a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '628b3ee79ca94a32a7022ae098ba2e9a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 532.475881] env[62585]: DEBUG nova.network.neutron [-] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 532.554065] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5410383-402f-46a1-bbfc-9f0cdb7c2448 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.566983] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0951179-3d4a-4a6d-b7fb-7da3213c5661 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.607021] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec25e9d-146f-473d-855f-0bd0d1300848 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.614665] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d29c29-ba75-4859-8a5c-c7183a96cd22 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.629661] env[62585]: DEBUG nova.compute.provider_tree [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 532.730818] env[62585]: DEBUG nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 532.798083] env[62585]: DEBUG oslo_concurrency.lockutils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Acquiring lock "b7686890-0ee7-4c5e-85f5-90a5c5241950" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.798329] env[62585]: DEBUG oslo_concurrency.lockutils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Lock "b7686890-0ee7-4c5e-85f5-90a5c5241950" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.833676] env[62585]: DEBUG nova.network.neutron [req-c4de2090-4a11-4901-bb26-5efea3e79cb0 req-d1f094cf-b627-4954-835f-21eeb7a7357d service nova] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 532.979852] env[62585]: INFO nova.compute.manager [-] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Took 1.04 seconds to deallocate network for instance. 
[ 532.985021] env[62585]: DEBUG nova.compute.claims [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 532.985021] env[62585]: DEBUG oslo_concurrency.lockutils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.020993] env[62585]: DEBUG nova.network.neutron [req-c4de2090-4a11-4901-bb26-5efea3e79cb0 req-d1f094cf-b627-4954-835f-21eeb7a7357d service nova] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.133434] env[62585]: DEBUG nova.scheduler.client.report [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 533.527948] env[62585]: DEBUG oslo_concurrency.lockutils [req-c4de2090-4a11-4901-bb26-5efea3e79cb0 req-d1f094cf-b627-4954-835f-21eeb7a7357d service nova] Releasing lock "refresh_cache-106dca6d-1ddf-4315-b645-c52c7c59f5d1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 533.529031] env[62585]: DEBUG nova.compute.manager [req-c4de2090-4a11-4901-bb26-5efea3e79cb0 req-d1f094cf-b627-4954-835f-21eeb7a7357d service nova] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Received event network-vif-deleted-f0854270-4feb-4756-a645-54b6d6320e21 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 533.641964] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.930s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.642759] env[62585]: ERROR nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d73950ff-ebe1-4fc5-8f22-da4a90362254, please check neutron logs for more information. 
[ 533.642759] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Traceback (most recent call last): [ 533.642759] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 533.642759] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] self.driver.spawn(context, instance, image_meta, [ 533.642759] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 533.642759] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 533.642759] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 533.642759] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] vm_ref = self.build_virtual_machine(instance, [ 533.642759] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 533.642759] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] vif_infos = vmwarevif.get_vif_info(self._session, [ 533.642759] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 533.643474] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] for vif in network_info: [ 533.643474] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 533.643474] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] return self._sync_wrapper(fn, *args, **kwargs) [ 533.643474] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 533.643474] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] self.wait() [ 533.643474] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 533.643474] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] self[:] = self._gt.wait() [ 533.643474] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 533.643474] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] return self._exit_event.wait() [ 533.643474] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 533.643474] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] result = hub.switch() [ 533.643474] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
533.643474] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] return self.greenlet.switch() [ 533.643856] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 533.643856] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] result = function(*args, **kwargs) [ 533.643856] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 533.643856] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] return func(*args, **kwargs) [ 533.643856] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 533.643856] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] raise e [ 533.643856] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 533.643856] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] nwinfo = self.network_api.allocate_for_instance( [ 533.643856] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 533.643856] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] created_port_ids = self._update_ports_for_instance( [ 533.643856] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 533.643856] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] with excutils.save_and_reraise_exception(): [ 533.643856] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.644201] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] self.force_reraise() [ 533.644201] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.644201] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] raise self.value [ 533.644201] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 533.644201] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] updated_port = self._update_port( [ 533.644201] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.644201] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] _ensure_no_port_binding_failure(port) [ 533.644201] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 533.644201] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] raise exception.PortBindingFailed(port_id=port['id']) [ 533.644201] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] nova.exception.PortBindingFailed: Binding failed for port d73950ff-ebe1-4fc5-8f22-da4a90362254, please check neutron logs for more information. [ 533.644201] env[62585]: ERROR nova.compute.manager [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] [ 533.644475] env[62585]: DEBUG nova.compute.utils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Binding failed for port d73950ff-ebe1-4fc5-8f22-da4a90362254, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 533.645579] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.879s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.649278] env[62585]: DEBUG nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Build of instance bc1b883b-32e4-45a8-b785-0eb53bbd7ae9 was re-scheduled: Binding failed for port d73950ff-ebe1-4fc5-8f22-da4a90362254, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 533.654135] env[62585]: DEBUG nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 533.654404] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Acquiring lock "refresh_cache-bc1b883b-32e4-45a8-b785-0eb53bbd7ae9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.654580] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Acquired lock "refresh_cache-bc1b883b-32e4-45a8-b785-0eb53bbd7ae9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.654947] env[62585]: DEBUG nova.network.neutron [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 533.743789] env[62585]: DEBUG nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 533.776936] env[62585]: DEBUG nova.virt.hardware [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 533.776936] env[62585]: DEBUG nova.virt.hardware [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 533.776936] env[62585]: DEBUG nova.virt.hardware [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 533.777098] env[62585]: DEBUG nova.virt.hardware [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 533.777098] env[62585]: DEBUG nova.virt.hardware [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 533.777098] env[62585]: DEBUG nova.virt.hardware [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 533.777098] env[62585]: DEBUG nova.virt.hardware [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 533.777098] env[62585]: DEBUG nova.virt.hardware [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 533.777324] env[62585]: DEBUG nova.virt.hardware [None 
req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 533.777324] env[62585]: DEBUG nova.virt.hardware [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 533.777324] env[62585]: DEBUG nova.virt.hardware [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 533.777324] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0563dd-06e1-48f5-a7c9-759026979031 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.787459] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a223b4e-c8be-4b7a-9a50-e18e7e1bfc12 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.871663] env[62585]: DEBUG nova.network.neutron [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Successfully created port: 697d8534-47a6-44ae-8467-6ab6f6378b6f {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 534.206488] env[62585]: DEBUG nova.network.neutron [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.251149] env[62585]: ERROR nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f650ce79-b0d6-4a2c-82dd-cfd77974d2f7, please check neutron logs for more information. 
[ 534.251149] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 534.251149] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 534.251149] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 534.251149] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 534.251149] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 534.251149] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 534.251149] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 534.251149] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.251149] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 534.251149] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.251149] env[62585]: ERROR nova.compute.manager raise self.value [ 534.251149] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 534.251149] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 534.251149] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.251149] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 534.252008] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.252008] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 534.252008] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f650ce79-b0d6-4a2c-82dd-cfd77974d2f7, please check neutron logs for more information. 
[ 534.252008] env[62585]: ERROR nova.compute.manager [ 534.252008] env[62585]: Traceback (most recent call last): [ 534.252008] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 534.252008] env[62585]: listener.cb(fileno) [ 534.252008] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 534.252008] env[62585]: result = function(*args, **kwargs) [ 534.252008] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 534.252008] env[62585]: return func(*args, **kwargs) [ 534.252008] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 534.252008] env[62585]: raise e [ 534.252008] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 534.252008] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 534.252008] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 534.252008] env[62585]: created_port_ids = self._update_ports_for_instance( [ 534.252008] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 534.252008] env[62585]: with excutils.save_and_reraise_exception(): [ 534.252008] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.252008] env[62585]: self.force_reraise() [ 534.252008] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.252008] env[62585]: raise self.value [ 534.252008] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 534.252008] env[62585]: updated_port = self._update_port( [ 534.252008] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.252008] env[62585]: _ensure_no_port_binding_failure(port) [ 534.252008] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.252008] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 534.253770] env[62585]: nova.exception.PortBindingFailed: Binding failed for port f650ce79-b0d6-4a2c-82dd-cfd77974d2f7, please check neutron logs for more information. [ 534.253770] env[62585]: Removing descriptor: 16 [ 534.253770] env[62585]: ERROR nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f650ce79-b0d6-4a2c-82dd-cfd77974d2f7, please check neutron logs for more information. 
[ 534.253770] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Traceback (most recent call last): [ 534.253770] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 534.253770] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] yield resources [ 534.253770] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 534.253770] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] self.driver.spawn(context, instance, image_meta, [ 534.253770] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 534.253770] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 534.253770] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 534.253770] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] vm_ref = self.build_virtual_machine(instance, [ 534.254098] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 534.254098] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] vif_infos = vmwarevif.get_vif_info(self._session, [ 534.254098] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 534.254098] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] for vif in network_info: [ 534.254098] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 534.254098] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] return self._sync_wrapper(fn, *args, **kwargs) [ 534.254098] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 534.254098] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] self.wait() [ 534.254098] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 534.254098] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] self[:] = self._gt.wait() [ 534.254098] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 534.254098] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] return self._exit_event.wait() [ 534.254098] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 534.254429] env[62585]: ERROR 
nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] result = hub.switch() [ 534.254429] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 534.254429] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] return self.greenlet.switch() [ 534.254429] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 534.254429] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] result = function(*args, **kwargs) [ 534.254429] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 534.254429] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] return func(*args, **kwargs) [ 534.254429] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 534.254429] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] raise e [ 534.254429] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 534.254429] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] nwinfo = self.network_api.allocate_for_instance( [ 534.254429] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 534.254429] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] created_port_ids = self._update_ports_for_instance( [ 534.255463] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 534.255463] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] with excutils.save_and_reraise_exception(): [ 534.255463] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.255463] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] self.force_reraise() [ 534.255463] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.255463] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] raise self.value [ 534.255463] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 534.255463] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] updated_port = self._update_port( [ 534.255463] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.255463] 
env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] _ensure_no_port_binding_failure(port) [ 534.255463] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.255463] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] raise exception.PortBindingFailed(port_id=port['id']) [ 534.255905] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] nova.exception.PortBindingFailed: Binding failed for port f650ce79-b0d6-4a2c-82dd-cfd77974d2f7, please check neutron logs for more information. [ 534.255905] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] [ 534.255905] env[62585]: INFO nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Terminating instance [ 534.259159] env[62585]: DEBUG oslo_concurrency.lockutils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Acquiring lock "refresh_cache-598c7b4f-8239-45af-8bc5-caf6b47172ba" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 534.259159] env[62585]: DEBUG oslo_concurrency.lockutils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Acquired lock "refresh_cache-598c7b4f-8239-45af-8bc5-caf6b47172ba" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.259159] env[62585]: DEBUG nova.network.neutron [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 534.306661] env[62585]: DEBUG nova.network.neutron [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.495372] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Acquiring lock "5177b4e2-e990-47e6-9f2b-156ca0ee8387" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.496319] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Lock "5177b4e2-e990-47e6-9f2b-156ca0ee8387" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.557500] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81cb49eb-e55d-41ec-88c3-58eb639ff309 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.567773] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bce2f56-4971-445f-a4c3-214798d27a5a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.618255] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18925cc-b9c6-4ee9-882c-63412d61e8d4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.633970] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7355d43a-feb9-4f09-b192-fc387a5cab2b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.648713] env[62585]: DEBUG nova.compute.provider_tree [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 534.811250] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Releasing lock "refresh_cache-bc1b883b-32e4-45a8-b785-0eb53bbd7ae9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.811646] env[62585]: DEBUG nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 534.811646] env[62585]: DEBUG nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 534.811772] env[62585]: DEBUG nova.network.neutron [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 534.819809] env[62585]: DEBUG nova.network.neutron [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.849964] env[62585]: DEBUG nova.network.neutron [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.982957] env[62585]: DEBUG nova.network.neutron [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.156230] env[62585]: DEBUG nova.scheduler.client.report [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 535.357730] env[62585]: DEBUG nova.network.neutron [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.487140] env[62585]: DEBUG oslo_concurrency.lockutils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Releasing lock "refresh_cache-598c7b4f-8239-45af-8bc5-caf6b47172ba" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.488799] env[62585]: DEBUG nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 535.488799] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 535.488799] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2941abd6-56b2-4834-8320-8b7c2c275ad3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.501962] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6364a8f-6699-4df3-8049-8f6818a46b55 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.539223] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 598c7b4f-8239-45af-8bc5-caf6b47172ba could not be found. [ 535.539471] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 535.539650] env[62585]: INFO nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Took 0.05 seconds to destroy the instance on the hypervisor. [ 535.539976] env[62585]: DEBUG oslo.service.loopingcall [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 535.540136] env[62585]: DEBUG nova.compute.manager [-] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 535.542073] env[62585]: DEBUG nova.network.neutron [-] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 535.574529] env[62585]: DEBUG nova.network.neutron [-] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 535.662572] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.017s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.662753] env[62585]: ERROR nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 83c2aeb0-e3b0-45b2-9ad0-a3a8a8d5c3c1, please check neutron logs for more information. [ 535.662753] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Traceback (most recent call last): [ 535.662753] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 535.662753] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] self.driver.spawn(context, instance, image_meta, [ 535.662753] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 535.662753] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 535.662753] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 535.662753] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] vm_ref = self.build_virtual_machine(instance, [ 535.662753] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 535.662753] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] vif_infos = vmwarevif.get_vif_info(self._session, [ 535.662753] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 535.663056] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] for vif in network_info: [ 535.663056] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 535.663056] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] return self._sync_wrapper(fn, *args, **kwargs) [ 535.663056] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 535.663056] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] self.wait() [ 535.663056] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 535.663056] env[62585]: ERROR 
nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] self[:] = self._gt.wait() [ 535.663056] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 535.663056] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] return self._exit_event.wait() [ 535.663056] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 535.663056] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] result = hub.switch() [ 535.663056] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 535.663056] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] return self.greenlet.switch() [ 535.663447] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 535.663447] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] result = function(*args, **kwargs) [ 535.663447] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 535.663447] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] return func(*args, **kwargs) [ 535.663447] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 535.663447] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] raise e [ 535.663447] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 535.663447] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] nwinfo = self.network_api.allocate_for_instance( [ 535.663447] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 535.663447] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] created_port_ids = self._update_ports_for_instance( [ 535.663447] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 535.663447] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] with excutils.save_and_reraise_exception(): [ 535.663447] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 535.663763] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] self.force_reraise() [ 535.663763] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 535.663763] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] raise self.value [ 535.663763] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 535.663763] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] updated_port = self._update_port( [ 535.663763] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 535.663763] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] _ensure_no_port_binding_failure(port) [ 535.663763] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 535.663763] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] raise exception.PortBindingFailed(port_id=port['id']) [ 535.663763] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] nova.exception.PortBindingFailed: Binding failed for port 83c2aeb0-e3b0-45b2-9ad0-a3a8a8d5c3c1, please check neutron logs for more information. [ 535.663763] env[62585]: ERROR nova.compute.manager [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] [ 535.664060] env[62585]: DEBUG nova.compute.utils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Binding failed for port 83c2aeb0-e3b0-45b2-9ad0-a3a8a8d5c3c1, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 535.666272] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.060s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.667159] env[62585]: DEBUG nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Build of instance 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8 was re-scheduled: Binding failed for port 83c2aeb0-e3b0-45b2-9ad0-a3a8a8d5c3c1, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 535.667532] env[62585]: DEBUG nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 535.669916] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Acquiring lock "refresh_cache-1a7356c7-1442-4de3-8a1f-04fc1bfb03b8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 535.670340] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Acquired lock "refresh_cache-1a7356c7-1442-4de3-8a1f-04fc1bfb03b8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 535.670340] env[62585]: DEBUG nova.network.neutron [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 535.704576] env[62585]: DEBUG nova.compute.manager [req-9cf9509f-e2cf-4a23-9335-661ba34fc8bf req-91f66d6b-06ed-41c7-9277-9e7a66771389 service nova] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Received event network-changed-f650ce79-b0d6-4a2c-82dd-cfd77974d2f7 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 535.704576] env[62585]: DEBUG nova.compute.manager [req-9cf9509f-e2cf-4a23-9335-661ba34fc8bf req-91f66d6b-06ed-41c7-9277-9e7a66771389 service nova] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Refreshing instance network info cache due to event network-changed-f650ce79-b0d6-4a2c-82dd-cfd77974d2f7. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 535.704576] env[62585]: DEBUG oslo_concurrency.lockutils [req-9cf9509f-e2cf-4a23-9335-661ba34fc8bf req-91f66d6b-06ed-41c7-9277-9e7a66771389 service nova] Acquiring lock "refresh_cache-598c7b4f-8239-45af-8bc5-caf6b47172ba" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 535.704576] env[62585]: DEBUG oslo_concurrency.lockutils [req-9cf9509f-e2cf-4a23-9335-661ba34fc8bf req-91f66d6b-06ed-41c7-9277-9e7a66771389 service nova] Acquired lock "refresh_cache-598c7b4f-8239-45af-8bc5-caf6b47172ba" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 535.704576] env[62585]: DEBUG nova.network.neutron [req-9cf9509f-e2cf-4a23-9335-661ba34fc8bf req-91f66d6b-06ed-41c7-9277-9e7a66771389 service nova] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Refreshing network info cache for port f650ce79-b0d6-4a2c-82dd-cfd77974d2f7 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 535.822546] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Acquiring lock "6f2ca381-b4a3-47ce-b135-dbceb7e44d24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.822771] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Lock "6f2ca381-b4a3-47ce-b135-dbceb7e44d24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.862806] env[62585]: INFO nova.compute.manager [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] Took 1.05 seconds to deallocate network for instance. 
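The 'Acquiring lock ... by ...' / 'acquired ... waited N.NNNs' / '"released" ... held N.NNNs' DEBUG entries throughout this stretch come from oslo.concurrency's lockutils, which logs every entry into and exit from a named semaphore such as "compute_resources" or the per-instance build lock. A minimal sketch of that pattern, assuming the decorator form that produces the 'acquired by "<function>"' wording (the function and lock names below are illustrative, not Nova's code):

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def update_tracker_state():
        # Entering logs 'Lock "compute_resources" acquired by "<this function>" :: waited X.XXXs';
        # returning logs '... "released" ... :: held Y.YYYs', matching the entries above.
        pass  # critical section protecting shared resource-tracker state

The long waits recorded above (e.g. 14.060s for "compute_resources") simply mean another green thread held the same semaphore for that long before this caller got in.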
[ 535.962189] env[62585]: DEBUG oslo_concurrency.lockutils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Acquiring lock "92080abc-eb47-439b-b702-d226666fa155" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.962189] env[62585]: DEBUG oslo_concurrency.lockutils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Lock "92080abc-eb47-439b-b702-d226666fa155" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.077444] env[62585]: DEBUG nova.network.neutron [-] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.196143] env[62585]: ERROR nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 697d8534-47a6-44ae-8467-6ab6f6378b6f, please check neutron logs for more information. [ 536.196143] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 536.196143] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.196143] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 536.196143] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 536.196143] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 536.196143] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 536.196143] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 536.196143] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.196143] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 536.196143] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.196143] env[62585]: ERROR nova.compute.manager raise self.value [ 536.196143] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 536.196143] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 536.196143] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.196143] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 536.196994] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.196994] env[62585]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 536.196994] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 697d8534-47a6-44ae-8467-6ab6f6378b6f, please check neutron logs for more information. [ 536.196994] env[62585]: ERROR nova.compute.manager [ 536.196994] env[62585]: Traceback (most recent call last): [ 536.196994] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 536.196994] env[62585]: listener.cb(fileno) [ 536.196994] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.196994] env[62585]: result = function(*args, **kwargs) [ 536.196994] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 536.196994] env[62585]: return func(*args, **kwargs) [ 536.196994] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 536.196994] env[62585]: raise e [ 536.196994] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.196994] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 536.196994] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 536.196994] env[62585]: created_port_ids = self._update_ports_for_instance( [ 536.196994] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 536.196994] env[62585]: with excutils.save_and_reraise_exception(): [ 536.196994] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.196994] env[62585]: self.force_reraise() [ 536.196994] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.196994] env[62585]: raise self.value [ 536.196994] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 536.196994] env[62585]: updated_port = self._update_port( [ 536.196994] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.196994] env[62585]: _ensure_no_port_binding_failure(port) [ 536.196994] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.196994] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 536.198612] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 697d8534-47a6-44ae-8467-6ab6f6378b6f, please check neutron logs for more information. [ 536.198612] env[62585]: Removing descriptor: 15 [ 536.198612] env[62585]: ERROR nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 697d8534-47a6-44ae-8467-6ab6f6378b6f, please check neutron logs for more information. 
[ 536.198612] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Traceback (most recent call last): [ 536.198612] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 536.198612] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] yield resources [ 536.198612] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 536.198612] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] self.driver.spawn(context, instance, image_meta, [ 536.198612] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 536.198612] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 536.198612] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 536.198612] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] vm_ref = self.build_virtual_machine(instance, [ 536.199212] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 536.199212] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] vif_infos = vmwarevif.get_vif_info(self._session, [ 536.199212] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 536.199212] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] for vif in network_info: [ 536.199212] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 536.199212] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] return self._sync_wrapper(fn, *args, **kwargs) [ 536.199212] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 536.199212] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] self.wait() [ 536.199212] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 536.199212] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] self[:] = self._gt.wait() [ 536.199212] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 536.199212] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] return self._exit_event.wait() [ 536.199212] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 536.201276] env[62585]: ERROR 
nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] result = hub.switch() [ 536.201276] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 536.201276] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] return self.greenlet.switch() [ 536.201276] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.201276] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] result = function(*args, **kwargs) [ 536.201276] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 536.201276] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] return func(*args, **kwargs) [ 536.201276] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 536.201276] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] raise e [ 536.201276] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.201276] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] nwinfo = self.network_api.allocate_for_instance( [ 536.201276] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 536.201276] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] created_port_ids = self._update_ports_for_instance( [ 536.201930] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 536.201930] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] with excutils.save_and_reraise_exception(): [ 536.201930] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.201930] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] self.force_reraise() [ 536.201930] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.201930] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] raise self.value [ 536.201930] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 536.201930] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] updated_port = self._update_port( [ 536.201930] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.201930] 
env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] _ensure_no_port_binding_failure(port) [ 536.201930] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.201930] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] raise exception.PortBindingFailed(port_id=port['id']) [ 536.202923] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] nova.exception.PortBindingFailed: Binding failed for port 697d8534-47a6-44ae-8467-6ab6f6378b6f, please check neutron logs for more information. [ 536.202923] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] [ 536.202923] env[62585]: INFO nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Terminating instance [ 536.202923] env[62585]: DEBUG oslo_concurrency.lockutils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Acquiring lock "refresh_cache-3d05d741-1b46-4646-8269-f72dc6ad5cbd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.202923] env[62585]: DEBUG oslo_concurrency.lockutils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Acquired lock "refresh_cache-3d05d741-1b46-4646-8269-f72dc6ad5cbd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.202923] env[62585]: DEBUG nova.network.neutron [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 536.207962] env[62585]: DEBUG nova.network.neutron [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 536.233256] env[62585]: DEBUG nova.network.neutron [req-9cf9509f-e2cf-4a23-9335-661ba34fc8bf req-91f66d6b-06ed-41c7-9277-9e7a66771389 service nova] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 536.340819] env[62585]: DEBUG nova.network.neutron [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.376210] env[62585]: DEBUG nova.network.neutron [req-9cf9509f-e2cf-4a23-9335-661ba34fc8bf req-91f66d6b-06ed-41c7-9277-9e7a66771389 service nova] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.580993] env[62585]: INFO nova.compute.manager [-] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Took 1.04 seconds to deallocate network for instance. [ 536.583871] env[62585]: DEBUG nova.compute.claims [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 536.584093] env[62585]: DEBUG oslo_concurrency.lockutils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.727482] env[62585]: DEBUG nova.network.neutron [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 536.821207] env[62585]: DEBUG nova.network.neutron [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.844308] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Releasing lock "refresh_cache-1a7356c7-1442-4de3-8a1f-04fc1bfb03b8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 536.844308] env[62585]: DEBUG nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 536.844308] env[62585]: DEBUG nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 536.844308] env[62585]: DEBUG nova.network.neutron [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 536.865372] env[62585]: DEBUG nova.network.neutron [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 536.880069] env[62585]: DEBUG oslo_concurrency.lockutils [req-9cf9509f-e2cf-4a23-9335-661ba34fc8bf req-91f66d6b-06ed-41c7-9277-9e7a66771389 service nova] Releasing lock "refresh_cache-598c7b4f-8239-45af-8bc5-caf6b47172ba" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 536.880069] env[62585]: DEBUG nova.compute.manager [req-9cf9509f-e2cf-4a23-9335-661ba34fc8bf req-91f66d6b-06ed-41c7-9277-9e7a66771389 service nova] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Received event network-vif-deleted-f650ce79-b0d6-4a2c-82dd-cfd77974d2f7 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 536.899156] env[62585]: INFO nova.scheduler.client.report [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Deleted allocations for instance bc1b883b-32e4-45a8-b785-0eb53bbd7ae9 [ 536.976376] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "a8af7330-6454-439c-870b-73d1637b6438" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.976635] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "a8af7330-6454-439c-870b-73d1637b6438" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.218040] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance bc1b883b-32e4-45a8-b785-0eb53bbd7ae9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 537.293277] env[62585]: DEBUG oslo_concurrency.lockutils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Acquiring lock "29f9e25a-a0b2-4bb8-b59a-3617819d3be5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.293277] env[62585]: DEBUG oslo_concurrency.lockutils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Lock "29f9e25a-a0b2-4bb8-b59a-3617819d3be5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.324429] env[62585]: DEBUG oslo_concurrency.lockutils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Releasing lock "refresh_cache-3d05d741-1b46-4646-8269-f72dc6ad5cbd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.324973] env[62585]: DEBUG nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 537.325276] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 537.325892] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f99eb16e-ab41-47c4-8644-d745b21677e3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.342950] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125beab5-b198-4fc2-849f-8895686897b2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.370471] env[62585]: DEBUG nova.network.neutron [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.370471] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3d05d741-1b46-4646-8269-f72dc6ad5cbd could not be found. 
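Each PortBindingFailed traceback above bottoms out in nova.network.neutron._ensure_no_port_binding_failure (neutron.py line 294), which raises as soon as Neutron reports the port binding as failed. In essence the check looks like the sketch below; this is a simplification rather than the verbatim Nova source, and the 'binding_failed' sentinel is the binding:vif_type value Neutron is assumed to set on a failed binding:

    from nova import exception

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # vif_type reported for a failed binding

    def _ensure_no_port_binding_failure(port):
        # The tracebacks above show exactly this raise, with port['id'] filled in.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise exception.PortBindingFailed(port_id=port['id'])

Because the exception escapes _update_ports_for_instance through save_and_reraise_exception, the build is aborted or re-scheduled, and the surrounding entries then record the cleanup: deallocating the network, aborting the resource claim, and terminating the instance.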
[ 537.370891] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 537.370978] env[62585]: INFO nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Took 0.05 seconds to destroy the instance on the hypervisor. [ 537.371212] env[62585]: DEBUG oslo.service.loopingcall [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 537.371933] env[62585]: DEBUG nova.compute.manager [-] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 537.372052] env[62585]: DEBUG nova.network.neutron [-] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 537.387379] env[62585]: DEBUG nova.network.neutron [-] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 537.410995] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Acquiring lock "e883b58a-0fa6-48fd-a8a7-24ead857e6f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.411425] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Lock "e883b58a-0fa6-48fd-a8a7-24ead857e6f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.413436] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f9fc8305-1151-4253-b247-66ba959d55cf tempest-ServersAdminNegativeTestJSON-812836573 tempest-ServersAdminNegativeTestJSON-812836573-project-member] Lock "bc1b883b-32e4-45a8-b785-0eb53bbd7ae9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.931s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.414825] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "bc1b883b-32e4-45a8-b785-0eb53bbd7ae9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 24.303s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.415419] env[62585]: INFO nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: bc1b883b-32e4-45a8-b785-0eb53bbd7ae9] During sync_power_state the instance has a pending task (spawning). Skip. [ 537.415752] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "bc1b883b-32e4-45a8-b785-0eb53bbd7ae9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.720955] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 537.721333] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 40fd1fff-1df0-43b6-9cce-a666ecd63199 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 537.721333] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 779efd7e-99d5-4065-8ade-1665533677a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 537.721602] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 916af5db-2e20-4156-9048-148f0f6253cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 537.721602] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance f8ac8468-a804-4d0f-a0e8-864eb7064074 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 537.722231] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 106dca6d-1ddf-4315-b645-c52c7c59f5d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 537.722231] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 598c7b4f-8239-45af-8bc5-caf6b47172ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 537.722231] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 3d05d741-1b46-4646-8269-f72dc6ad5cbd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 537.873199] env[62585]: INFO nova.compute.manager [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] Took 1.03 seconds to deallocate network for instance. [ 537.891029] env[62585]: DEBUG nova.network.neutron [-] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.917902] env[62585]: DEBUG nova.compute.manager [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 537.991890] env[62585]: DEBUG nova.compute.manager [req-9f4068d8-3ff2-4649-a766-5ba59ff19888 req-42cb3508-7b25-4a84-9f16-014d1c12b0d4 service nova] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Received event network-changed-697d8534-47a6-44ae-8467-6ab6f6378b6f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 537.992481] env[62585]: DEBUG nova.compute.manager [req-9f4068d8-3ff2-4649-a766-5ba59ff19888 req-42cb3508-7b25-4a84-9f16-014d1c12b0d4 service nova] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Refreshing instance network info cache due to event network-changed-697d8534-47a6-44ae-8467-6ab6f6378b6f. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 537.992481] env[62585]: DEBUG oslo_concurrency.lockutils [req-9f4068d8-3ff2-4649-a766-5ba59ff19888 req-42cb3508-7b25-4a84-9f16-014d1c12b0d4 service nova] Acquiring lock "refresh_cache-3d05d741-1b46-4646-8269-f72dc6ad5cbd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.992610] env[62585]: DEBUG oslo_concurrency.lockutils [req-9f4068d8-3ff2-4649-a766-5ba59ff19888 req-42cb3508-7b25-4a84-9f16-014d1c12b0d4 service nova] Acquired lock "refresh_cache-3d05d741-1b46-4646-8269-f72dc6ad5cbd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.992721] env[62585]: DEBUG nova.network.neutron [req-9f4068d8-3ff2-4649-a766-5ba59ff19888 req-42cb3508-7b25-4a84-9f16-014d1c12b0d4 service nova] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Refreshing network info cache for port 697d8534-47a6-44ae-8467-6ab6f6378b6f {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 538.227281] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance dd387320-7101-440c-80bc-a7d19a654df8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 538.393549] env[62585]: INFO nova.compute.manager [-] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Took 1.02 seconds to deallocate network for instance. [ 538.399148] env[62585]: DEBUG nova.compute.claims [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 538.399148] env[62585]: DEBUG oslo_concurrency.lockutils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.445959] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.520667] env[62585]: DEBUG nova.network.neutron [req-9f4068d8-3ff2-4649-a766-5ba59ff19888 req-42cb3508-7b25-4a84-9f16-014d1c12b0d4 service nova] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 538.631865] env[62585]: DEBUG nova.network.neutron [req-9f4068d8-3ff2-4649-a766-5ba59ff19888 req-42cb3508-7b25-4a84-9f16-014d1c12b0d4 service nova] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.731955] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 1531ed40-29c2-4812-afd5-eabffe22f4ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 538.907111] env[62585]: INFO nova.scheduler.client.report [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Deleted allocations for instance 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8 [ 539.135453] env[62585]: DEBUG oslo_concurrency.lockutils [req-9f4068d8-3ff2-4649-a766-5ba59ff19888 req-42cb3508-7b25-4a84-9f16-014d1c12b0d4 service nova] Releasing lock "refresh_cache-3d05d741-1b46-4646-8269-f72dc6ad5cbd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.135453] env[62585]: DEBUG nova.compute.manager [req-9f4068d8-3ff2-4649-a766-5ba59ff19888 req-42cb3508-7b25-4a84-9f16-014d1c12b0d4 service nova] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Received event network-vif-deleted-697d8534-47a6-44ae-8467-6ab6f6378b6f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 539.235106] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 149bd77b-9583-42e5-8c82-f795cac53b87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 539.415346] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02837597-0855-408c-b46e-2c9186f66afd tempest-ImagesOneServerTestJSON-1513938224 tempest-ImagesOneServerTestJSON-1513938224-project-member] Lock "1a7356c7-1442-4de3-8a1f-04fc1bfb03b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.004s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.416838] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "1a7356c7-1442-4de3-8a1f-04fc1bfb03b8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 26.305s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.417016] env[62585]: INFO nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 1a7356c7-1442-4de3-8a1f-04fc1bfb03b8] During sync_power_state the instance has a pending task (networking). Skip. [ 539.417647] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "1a7356c7-1442-4de3-8a1f-04fc1bfb03b8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.740043] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 01432003-5c48-40e1-b22b-a538a7e34663 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 539.919424] env[62585]: DEBUG nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 540.242438] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 540.452267] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.748872] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 971d6e19-044b-4af8-b6c3-12b617cc24fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 541.254932] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 68b4ca9d-f934-4b44-8c34-0b1bfb848672 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 541.760643] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance b7686890-0ee7-4c5e-85f5-90a5c5241950 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 542.265227] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 5177b4e2-e990-47e6-9f2b-156ca0ee8387 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 542.769519] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 6f2ca381-b4a3-47ce-b135-dbceb7e44d24 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 543.007619] env[62585]: DEBUG nova.compute.manager [None req-99b3d21a-a8bf-49f8-90a4-184a4f75330a tempest-ServerDiagnosticsV248Test-2060620221 tempest-ServerDiagnosticsV248Test-2060620221-project-admin] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 543.008053] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50491dfe-4209-43c0-b9a1-cc29cdcbd7d7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.021487] env[62585]: INFO nova.compute.manager [None req-99b3d21a-a8bf-49f8-90a4-184a4f75330a tempest-ServerDiagnosticsV248Test-2060620221 tempest-ServerDiagnosticsV248Test-2060620221-project-admin] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Retrieving diagnostics [ 543.022837] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b65625-260b-4dcd-b9af-2f5d9ac01711 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.275654] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 92080abc-eb47-439b-b702-d226666fa155 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 543.783247] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance a8af7330-6454-439c-870b-73d1637b6438 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 543.783570] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 543.785708] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 544.279157] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b489e4-54d3-4d09-8285-80ac7af8dfc6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.290624] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c43bfd3-3213-43cd-8237-f366f5da78c3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.336178] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d45f64-e707-4c52-ae9a-fa396c8c764b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.351688] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f121fe-8558-48fc-a579-d89ff4030eca {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.368870] env[62585]: DEBUG nova.compute.provider_tree [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 544.845565] env[62585]: DEBUG oslo_concurrency.lockutils [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Acquiring lock "779efd7e-99d5-4065-8ade-1665533677a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.845864] env[62585]: DEBUG oslo_concurrency.lockutils [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Lock "779efd7e-99d5-4065-8ade-1665533677a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.846073] env[62585]: DEBUG oslo_concurrency.lockutils [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Acquiring lock "779efd7e-99d5-4065-8ade-1665533677a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.846216] 
env[62585]: DEBUG oslo_concurrency.lockutils [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Lock "779efd7e-99d5-4065-8ade-1665533677a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.846411] env[62585]: DEBUG oslo_concurrency.lockutils [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Lock "779efd7e-99d5-4065-8ade-1665533677a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.852402] env[62585]: INFO nova.compute.manager [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Terminating instance [ 544.854360] env[62585]: DEBUG oslo_concurrency.lockutils [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Acquiring lock "refresh_cache-779efd7e-99d5-4065-8ade-1665533677a4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.855021] env[62585]: DEBUG oslo_concurrency.lockutils [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Acquired lock "refresh_cache-779efd7e-99d5-4065-8ade-1665533677a4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.855021] env[62585]: DEBUG nova.network.neutron [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 544.875018] env[62585]: DEBUG nova.scheduler.client.report [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 545.384547] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62585) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 545.384547] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: 
held 9.718s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.384547] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.490s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.388140] env[62585]: DEBUG nova.network.neutron [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 545.543422] env[62585]: DEBUG nova.network.neutron [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.047412] env[62585]: DEBUG oslo_concurrency.lockutils [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Releasing lock "refresh_cache-779efd7e-99d5-4065-8ade-1665533677a4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.047932] env[62585]: DEBUG nova.compute.manager [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 546.048084] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 546.048930] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7abad13-f73f-4d26-a059-f032cc5a0c0a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.060455] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 546.060773] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-16549d55-57dd-4def-b6b4-9afdbd100f87 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.070701] env[62585]: DEBUG oslo_vmware.api [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for the task: (returnval){ [ 546.070701] env[62585]: value = "task-1384597" [ 546.070701] env[62585]: _type = "Task" [ 546.070701] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.089180] env[62585]: DEBUG oslo_vmware.api [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384597, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.287781] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f448f01-f4a0-4870-b87a-a53d5da4c88a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.299067] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab468d3-516a-49a8-b122-f71966f66f77 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.333090] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2f42a1-e7bc-43e4-9120-a52563832ab4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.343696] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39e990c-8770-49c0-ac6d-a51308edce4c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.362491] env[62585]: DEBUG nova.compute.provider_tree [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 546.580909] env[62585]: DEBUG oslo_vmware.api [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384597, 'name': PowerOffVM_Task, 'duration_secs': 0.1339} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 546.581219] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 546.581386] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 546.581631] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ef9b6b1-8dcc-42f9-b619-63ea32e83928 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.614199] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 546.614199] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 546.614479] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Deleting the datastore file [datastore1] 779efd7e-99d5-4065-8ade-1665533677a4 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 546.615630] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2bc378a5-89a0-4604-b4c9-50b7f0d0dd99 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.622972] env[62585]: DEBUG oslo_vmware.api [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for the task: (returnval){ [ 546.622972] env[62585]: value = "task-1384599" [ 546.622972] env[62585]: _type = "Task" [ 546.622972] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.632328] env[62585]: DEBUG oslo_vmware.api [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384599, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.800880] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Acquiring lock "5cd813d1-f778-4c8a-920b-64e92a3b52af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.801188] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Lock "5cd813d1-f778-4c8a-920b-64e92a3b52af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.865401] env[62585]: DEBUG nova.scheduler.client.report [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 547.134586] env[62585]: DEBUG oslo_vmware.api [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Task: {'id': task-1384599, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093322} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.134910] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 547.135873] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 547.135873] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 547.135873] env[62585]: INFO nova.compute.manager [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Took 1.09 seconds to destroy the instance on the hypervisor. [ 547.135873] env[62585]: DEBUG oslo.service.loopingcall [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 547.136077] env[62585]: DEBUG nova.compute.manager [-] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 547.136077] env[62585]: DEBUG nova.network.neutron [-] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 547.164947] env[62585]: DEBUG nova.network.neutron [-] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 547.375817] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.993s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.376562] env[62585]: ERROR nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7f49104d-4e7d-451d-b8e9-a605cabd3b1a, please check neutron logs for more information. 
[ 547.376562] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Traceback (most recent call last): [ 547.376562] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 547.376562] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] self.driver.spawn(context, instance, image_meta, [ 547.376562] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 547.376562] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] self._vmops.spawn(context, instance, image_meta, injected_files, [ 547.376562] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 547.376562] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] vm_ref = self.build_virtual_machine(instance, [ 547.376562] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 547.376562] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] vif_infos = vmwarevif.get_vif_info(self._session, [ 547.376562] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 547.377252] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] for vif in network_info: [ 547.377252] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 547.377252] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] return self._sync_wrapper(fn, *args, **kwargs) [ 547.377252] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 547.377252] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] self.wait() [ 547.377252] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 547.377252] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] self[:] = self._gt.wait() [ 547.377252] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 547.377252] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] return self._exit_event.wait() [ 547.377252] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 547.377252] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] result = hub.switch() [ 547.377252] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
547.377252] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] return self.greenlet.switch() [ 547.377744] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.377744] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] result = function(*args, **kwargs) [ 547.377744] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 547.377744] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] return func(*args, **kwargs) [ 547.377744] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 547.377744] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] raise e [ 547.377744] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 547.377744] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] nwinfo = self.network_api.allocate_for_instance( [ 547.377744] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 547.377744] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] created_port_ids = self._update_ports_for_instance( [ 547.377744] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 547.377744] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] with excutils.save_and_reraise_exception(): [ 547.377744] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.378162] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] self.force_reraise() [ 547.378162] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.378162] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] raise self.value [ 547.378162] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 547.378162] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] updated_port = self._update_port( [ 547.378162] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.378162] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] _ensure_no_port_binding_failure(port) [ 547.378162] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 547.378162] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] raise exception.PortBindingFailed(port_id=port['id']) [ 547.378162] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] nova.exception.PortBindingFailed: Binding failed for port 7f49104d-4e7d-451d-b8e9-a605cabd3b1a, please check neutron logs for more information. [ 547.378162] env[62585]: ERROR nova.compute.manager [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] [ 547.378508] env[62585]: DEBUG nova.compute.utils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Binding failed for port 7f49104d-4e7d-451d-b8e9-a605cabd3b1a, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 547.379193] env[62585]: DEBUG oslo_concurrency.lockutils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.427s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.383577] env[62585]: DEBUG nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Build of instance 40fd1fff-1df0-43b6-9cce-a666ecd63199 was re-scheduled: Binding failed for port 7f49104d-4e7d-451d-b8e9-a605cabd3b1a, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 547.383577] env[62585]: DEBUG nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 547.384480] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Acquiring lock "refresh_cache-40fd1fff-1df0-43b6-9cce-a666ecd63199" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.384480] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Acquired lock "refresh_cache-40fd1fff-1df0-43b6-9cce-a666ecd63199" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.384480] env[62585]: DEBUG nova.network.neutron [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 547.410257] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Acquiring lock "c080105d-4a58-4616-b65c-7bac79dd93c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.410489] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Lock "c080105d-4a58-4616-b65c-7bac79dd93c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.458399] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Acquiring lock "14557f1a-2410-4201-9b91-49d23f18d47a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.458626] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Lock "14557f1a-2410-4201-9b91-49d23f18d47a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.667344] env[62585]: DEBUG nova.network.neutron [-] [instance: 
779efd7e-99d5-4065-8ade-1665533677a4] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.908532] env[62585]: DEBUG nova.network.neutron [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 548.023646] env[62585]: DEBUG nova.network.neutron [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.177365] env[62585]: INFO nova.compute.manager [-] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Took 1.04 seconds to deallocate network for instance. [ 548.337103] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8871c2d-e51e-4f5e-849f-94b0b865cc46 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.345730] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866f995d-9d9d-4b56-9c6b-464be5d43765 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.377468] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b54be83d-2dce-4f84-9e5b-a75bab307a75 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.386348] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4faf366-e1c5-41d5-91c6-3cbe046baf73 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.403481] env[62585]: DEBUG nova.compute.provider_tree [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 548.529806] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Releasing lock "refresh_cache-40fd1fff-1df0-43b6-9cce-a666ecd63199" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.530063] env[62585]: DEBUG nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 548.530237] env[62585]: DEBUG nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 548.530401] env[62585]: DEBUG nova.network.neutron [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 548.549165] env[62585]: DEBUG nova.network.neutron [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 548.686708] env[62585]: DEBUG oslo_concurrency.lockutils [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.909062] env[62585]: DEBUG nova.scheduler.client.report [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 549.052585] env[62585]: DEBUG nova.network.neutron [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.418163] env[62585]: DEBUG oslo_concurrency.lockutils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.039s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.418694] env[62585]: ERROR nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 23d648f2-c6c6-4932-91e9-76c55fe82637, please check neutron logs for more 
information. [ 549.418694] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Traceback (most recent call last): [ 549.418694] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 549.418694] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] self.driver.spawn(context, instance, image_meta, [ 549.418694] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 549.418694] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 549.418694] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 549.418694] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] vm_ref = self.build_virtual_machine(instance, [ 549.418694] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 549.418694] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] vif_infos = vmwarevif.get_vif_info(self._session, [ 549.418694] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 549.419077] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] for vif in network_info: [ 549.419077] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 549.419077] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] return self._sync_wrapper(fn, *args, **kwargs) [ 549.419077] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 549.419077] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] self.wait() [ 549.419077] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 549.419077] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] self[:] = self._gt.wait() [ 549.419077] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 549.419077] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] return self._exit_event.wait() [ 549.419077] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 549.419077] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] result = hub.switch() [ 549.419077] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in 
switch [ 549.419077] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] return self.greenlet.switch() [ 549.419423] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.419423] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] result = function(*args, **kwargs) [ 549.419423] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 549.419423] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] return func(*args, **kwargs) [ 549.419423] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 549.419423] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] raise e [ 549.419423] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.419423] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] nwinfo = self.network_api.allocate_for_instance( [ 549.419423] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 549.419423] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] created_port_ids = self._update_ports_for_instance( [ 549.419423] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 549.419423] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] with excutils.save_and_reraise_exception(): [ 549.419423] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.419773] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] self.force_reraise() [ 549.419773] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.419773] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] raise self.value [ 549.419773] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 549.419773] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] updated_port = self._update_port( [ 549.419773] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.419773] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] _ensure_no_port_binding_failure(port) [ 549.419773] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 549.419773] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] raise exception.PortBindingFailed(port_id=port['id']) [ 549.419773] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] nova.exception.PortBindingFailed: Binding failed for port 23d648f2-c6c6-4932-91e9-76c55fe82637, please check neutron logs for more information. [ 549.419773] env[62585]: ERROR nova.compute.manager [instance: 916af5db-2e20-4156-9048-148f0f6253cd] [ 549.420084] env[62585]: DEBUG nova.compute.utils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Binding failed for port 23d648f2-c6c6-4932-91e9-76c55fe82637, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 549.421156] env[62585]: DEBUG oslo_concurrency.lockutils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.407s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.424488] env[62585]: DEBUG nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Build of instance 916af5db-2e20-4156-9048-148f0f6253cd was re-scheduled: Binding failed for port 23d648f2-c6c6-4932-91e9-76c55fe82637, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 549.424983] env[62585]: DEBUG nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 549.425241] env[62585]: DEBUG oslo_concurrency.lockutils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Acquiring lock "refresh_cache-916af5db-2e20-4156-9048-148f0f6253cd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.425921] env[62585]: DEBUG oslo_concurrency.lockutils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Acquired lock "refresh_cache-916af5db-2e20-4156-9048-148f0f6253cd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.425921] env[62585]: DEBUG nova.network.neutron [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 549.557534] env[62585]: INFO nova.compute.manager [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] Took 1.03 seconds to deallocate network for instance. [ 549.982252] env[62585]: DEBUG nova.network.neutron [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 550.159671] env[62585]: DEBUG nova.network.neutron [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.412282] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37bd3b1d-953c-4077-9ed9-eb6eb69331af {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.420981] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3479c688-079f-48e6-a92e-f9b0fb2525f0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.463230] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a7bc4f-3b89-4433-b21b-f010dcf3c8d7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.475500] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc502042-bfda-433f-9ff2-abbc95a921ff {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.492362] env[62585]: DEBUG nova.compute.provider_tree [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 550.636191] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "16f01d66-44f8-4912-989a-48c39f667c95" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.636191] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "16f01d66-44f8-4912-989a-48c39f667c95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.673511] env[62585]: DEBUG oslo_concurrency.lockutils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Releasing lock "refresh_cache-916af5db-2e20-4156-9048-148f0f6253cd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.674036] env[62585]: DEBUG nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 550.675461] env[62585]: DEBUG nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 550.675461] env[62585]: DEBUG nova.network.neutron [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 550.681317] env[62585]: INFO nova.scheduler.client.report [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Deleted allocations for instance 40fd1fff-1df0-43b6-9cce-a666ecd63199 [ 550.711185] env[62585]: DEBUG nova.network.neutron [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 550.995330] env[62585]: DEBUG nova.scheduler.client.report [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 551.196702] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ab8bce8e-d935-462f-9b83-b66044880369 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Lock "40fd1fff-1df0-43b6-9cce-a666ecd63199" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.121s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.198125] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "40fd1fff-1df0-43b6-9cce-a666ecd63199" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 38.086s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.198461] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-47002da1-ab10-426e-8b48-958bb34fa0e8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.210232] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0993f0bf-ba0b-49e3-bf64-7a1fe3afba57 {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.224109] env[62585]: DEBUG nova.network.neutron [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.500983] env[62585]: DEBUG oslo_concurrency.lockutils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.080s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.501663] env[62585]: ERROR nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 934ebd8c-620e-41c6-8472-d02137397e04, please check neutron logs for more information. [ 551.501663] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Traceback (most recent call last): [ 551.501663] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 551.501663] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] self.driver.spawn(context, instance, image_meta, [ 551.501663] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 551.501663] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] self._vmops.spawn(context, instance, image_meta, injected_files, [ 551.501663] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 551.501663] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] vm_ref = self.build_virtual_machine(instance, [ 551.501663] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 551.501663] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] vif_infos = vmwarevif.get_vif_info(self._session, [ 551.501663] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 551.502973] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] for vif in network_info: [ 551.502973] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 551.502973] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] return self._sync_wrapper(fn, *args, **kwargs) [ 551.502973] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File 
"/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 551.502973] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] self.wait() [ 551.502973] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 551.502973] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] self[:] = self._gt.wait() [ 551.502973] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 551.502973] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] return self._exit_event.wait() [ 551.502973] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 551.502973] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] result = hub.switch() [ 551.502973] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 551.502973] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] return self.greenlet.switch() [ 551.503661] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 551.503661] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] result = function(*args, **kwargs) [ 551.503661] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 551.503661] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] return func(*args, **kwargs) [ 551.503661] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 551.503661] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] raise e [ 551.503661] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 551.503661] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] nwinfo = self.network_api.allocate_for_instance( [ 551.503661] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 551.503661] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] created_port_ids = self._update_ports_for_instance( [ 551.503661] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 551.503661] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] with excutils.save_and_reraise_exception(): [ 551.503661] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 551.504519] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] self.force_reraise() [ 551.504519] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 551.504519] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] raise self.value [ 551.504519] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 551.504519] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] updated_port = self._update_port( [ 551.504519] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 551.504519] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] _ensure_no_port_binding_failure(port) [ 551.504519] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 551.504519] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] raise exception.PortBindingFailed(port_id=port['id']) [ 551.504519] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] nova.exception.PortBindingFailed: Binding failed for port 934ebd8c-620e-41c6-8472-d02137397e04, please check neutron logs for more information. [ 551.504519] env[62585]: ERROR nova.compute.manager [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] [ 551.504988] env[62585]: DEBUG nova.compute.utils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Binding failed for port 934ebd8c-620e-41c6-8472-d02137397e04, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 551.504988] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.125s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.506082] env[62585]: INFO nova.compute.claims [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 551.511928] env[62585]: DEBUG nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Build of instance f8ac8468-a804-4d0f-a0e8-864eb7064074 was re-scheduled: Binding failed for port 934ebd8c-620e-41c6-8472-d02137397e04, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 551.514196] env[62585]: DEBUG nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 551.514196] env[62585]: DEBUG oslo_concurrency.lockutils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Acquiring lock "refresh_cache-f8ac8468-a804-4d0f-a0e8-864eb7064074" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.514196] env[62585]: DEBUG oslo_concurrency.lockutils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Acquired lock "refresh_cache-f8ac8468-a804-4d0f-a0e8-864eb7064074" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.514196] env[62585]: DEBUG nova.network.neutron [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 551.701744] env[62585]: DEBUG nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 551.728539] env[62585]: INFO nova.compute.manager [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] [instance: 916af5db-2e20-4156-9048-148f0f6253cd] Took 1.05 seconds to deallocate network for instance. 
[ 551.744139] env[62585]: INFO nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 40fd1fff-1df0-43b6-9cce-a666ecd63199] During the sync_power process the instance has moved from host None to host cpu-1 [ 551.744370] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "40fd1fff-1df0-43b6-9cce-a666ecd63199" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.546s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.922127] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "2cf0927d-8d98-4554-92ce-c049e1ea179c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.922424] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "2cf0927d-8d98-4554-92ce-c049e1ea179c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.050223] env[62585]: DEBUG nova.network.neutron [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 552.229614] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.251528] env[62585]: DEBUG nova.network.neutron [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.756864] env[62585]: DEBUG oslo_concurrency.lockutils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Releasing lock "refresh_cache-f8ac8468-a804-4d0f-a0e8-864eb7064074" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.756864] env[62585]: DEBUG nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 552.756864] env[62585]: DEBUG nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 552.756864] env[62585]: DEBUG nova.network.neutron [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 552.776020] env[62585]: INFO nova.scheduler.client.report [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Deleted allocations for instance 916af5db-2e20-4156-9048-148f0f6253cd [ 552.802814] env[62585]: DEBUG nova.network.neutron [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 552.980877] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6e75c3-fece-483c-9cb9-b1bcc05d6158 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.996786] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0465974-e9f8-482a-baa2-0d5dd3d9a910 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.007403] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Acquiring lock "3695a09f-dffc-4537-ac78-faffd6bdd252" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.007403] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Lock "3695a09f-dffc-4537-ac78-faffd6bdd252" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.042198] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eff15ee-d47d-40cf-b5f4-0b19c845fde2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.052487] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46d67ae-2402-47a0-84b8-d681db8fb291 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.069720] env[62585]: DEBUG 
nova.compute.provider_tree [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 553.287351] env[62585]: DEBUG oslo_concurrency.lockutils [None req-83cb95a9-812e-4088-abae-5d85e7e3168e tempest-ServersTestManualDisk-589348422 tempest-ServersTestManualDisk-589348422-project-member] Lock "916af5db-2e20-4156-9048-148f0f6253cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.883s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.306595] env[62585]: DEBUG nova.network.neutron [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.572803] env[62585]: DEBUG nova.scheduler.client.report [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 553.790735] env[62585]: DEBUG nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 553.810933] env[62585]: INFO nova.compute.manager [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: f8ac8468-a804-4d0f-a0e8-864eb7064074] Took 1.06 seconds to deallocate network for instance. [ 554.080833] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.576s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.080833] env[62585]: DEBUG nova.compute.manager [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 554.083960] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.274s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.085534] env[62585]: INFO nova.compute.claims [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 554.324315] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.585922] env[62585]: DEBUG nova.compute.utils [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 554.588533] env[62585]: DEBUG nova.compute.manager [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Not allocating networking since 'none' was specified. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 554.851262] env[62585]: INFO nova.scheduler.client.report [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Deleted allocations for instance f8ac8468-a804-4d0f-a0e8-864eb7064074 [ 555.027257] env[62585]: DEBUG oslo_concurrency.lockutils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Acquiring lock "86e3d197-2e8c-4357-ac0a-e1af8e247024" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.027257] env[62585]: DEBUG oslo_concurrency.lockutils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Lock "86e3d197-2e8c-4357-ac0a-e1af8e247024" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.093677] env[62585]: DEBUG nova.compute.manager [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 555.170682] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Acquiring lock "aae9ff25-f304-4dbe-824c-b17b3522655c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.171015] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Lock "aae9ff25-f304-4dbe-824c-b17b3522655c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.360758] env[62585]: DEBUG oslo_concurrency.lockutils [None req-61b8651b-db78-4456-b996-4657196a3490 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Lock "f8ac8468-a804-4d0f-a0e8-864eb7064074" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.150s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.493236] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c88327-f65b-47c9-a000-c151912ac049 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.499367] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f1cee0-5e1a-4938-9f93-2074a6964e62 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.529590] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e6c7c2-af67-4db5-9e31-a8f6b00cdd12 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.537389] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0965a348-1f49-4f5f-8f8b-709536a51706 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.555072] env[62585]: DEBUG nova.compute.provider_tree [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 555.868907] env[62585]: DEBUG nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 556.061025] env[62585]: DEBUG nova.scheduler.client.report [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 556.103515] env[62585]: DEBUG nova.compute.manager [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 556.144028] env[62585]: DEBUG nova.virt.hardware [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 556.144028] env[62585]: DEBUG nova.virt.hardware [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 556.144028] env[62585]: DEBUG nova.virt.hardware [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 556.144028] env[62585]: DEBUG nova.virt.hardware [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 556.144326] env[62585]: DEBUG nova.virt.hardware [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 556.144326] env[62585]: DEBUG nova.virt.hardware [None 
req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 556.144518] env[62585]: DEBUG nova.virt.hardware [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 556.144675] env[62585]: DEBUG nova.virt.hardware [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 556.144837] env[62585]: DEBUG nova.virt.hardware [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 556.145011] env[62585]: DEBUG nova.virt.hardware [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 556.145325] env[62585]: DEBUG nova.virt.hardware [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 556.146227] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf8a73e-b7bc-4dfc-80de-4b2787d358b5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.156785] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1652c3-fa37-42bb-9855-b7c527c82c5f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.179546] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Instance VIF info [] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 556.189086] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Creating folder: Project (434d224eea6b48519395b747b79015b8). Parent ref: group-v293962. 
{{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 556.190711] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9985b5a-b1b7-4e8d-814c-764a094101b5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.206140] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Created folder: Project (434d224eea6b48519395b747b79015b8) in parent group-v293962. [ 556.206140] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Creating folder: Instances. Parent ref: group-v293970. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 556.206790] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f74e07d-e1aa-4bdb-a169-632cefb9e8b5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.217640] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Created folder: Instances in parent group-v293970. [ 556.220174] env[62585]: DEBUG oslo.service.loopingcall [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 556.220845] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 556.222285] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70ab044c-57bf-4d40-8a55-33a4042cccde {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.252918] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 556.252918] env[62585]: value = "task-1384602" [ 556.252918] env[62585]: _type = "Task" [ 556.252918] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.261651] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384602, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.414199] env[62585]: DEBUG oslo_concurrency.lockutils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.567574] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.484s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.568094] env[62585]: DEBUG nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 556.573983] env[62585]: DEBUG oslo_concurrency.lockutils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.590s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.670377] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Acquiring lock "3b50dbde-2969-4a4b-ae35-42416342a60b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.670923] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Lock "3b50dbde-2969-4a4b-ae35-42416342a60b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.768105] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384602, 'name': CreateVM_Task, 'duration_secs': 0.271974} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.768369] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 556.769869] env[62585]: DEBUG oslo_vmware.service [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e777af17-560d-4274-8a49-9011809107d5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.778303] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.778614] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.778827] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 556.779079] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-084fe342-8b74-4102-a148-ea3d892b6165 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.783663] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 556.783663] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5247c202-961c-1c49-90e6-374e4b706971" [ 556.783663] env[62585]: _type = "Task" [ 556.783663] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.790982] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5247c202-961c-1c49-90e6-374e4b706971, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.083689] env[62585]: DEBUG nova.compute.utils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 557.089699] env[62585]: DEBUG nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 557.091592] env[62585]: DEBUG nova.network.neutron [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 557.194188] env[62585]: DEBUG nova.policy [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e64ca3f58b3e4824a06386f8da0f6daa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd7b958f143d54d499e921e90d9552490', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 557.304127] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.304248] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 557.304438] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.304757] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.304757] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 557.305183] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aae75235-c1a0-458a-9df1-5c3a50de847a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.316193] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 557.316394] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 557.319540] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73de5b78-03bc-45a1-bb0a-5cdd8c326c4e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.326105] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0227764d-8f17-4571-9e8a-7aaa65e7a3b5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.330996] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 557.330996] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e4cc8b-7023-9f38-c2f1-52e9a068c335" [ 557.330996] env[62585]: _type = "Task" [ 557.330996] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.340680] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e4cc8b-7023-9f38-c2f1-52e9a068c335, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.590702] env[62585]: DEBUG nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 557.628950] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3f14d3-eb3a-4972-9cad-cbccc2251f70 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.641264] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8155f693-e4e4-4a68-bee7-8ded56f49c83 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.676416] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4692e445-7bfb-421c-a4f9-09cc0502f448 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.686951] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9212b031-2e4e-48a4-ba61-9f663e3149c9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.704510] env[62585]: DEBUG nova.compute.provider_tree [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 557.786841] env[62585]: DEBUG nova.network.neutron [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Successfully created port: bf826877-c169-4efd-a6cc-e1340c279cf4 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 557.842886] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Preparing fetch location {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 557.843329] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Creating directory with path [datastore2] vmware_temp/a55de780-adab-4508-9d4f-a6be158f6240/790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 557.844126] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a033ea17-4b8f-48d1-9b1a-9c28c31913a5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.861660] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Created directory with path [datastore2] vmware_temp/a55de780-adab-4508-9d4f-a6be158f6240/790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 557.861989] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 
tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Fetch image to [datastore2] vmware_temp/a55de780-adab-4508-9d4f-a6be158f6240/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 557.862087] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Downloading image file data 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 to [datastore2] vmware_temp/a55de780-adab-4508-9d4f-a6be158f6240/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk on the data store datastore2 {{(pid=62585) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 557.862825] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79928d2e-473b-41d9-b750-05dad4417c2a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.873475] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa4083e-ce43-4e16-a44a-3b0c64c66b20 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.885038] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6b6229-7e12-4831-b3f5-0065e41a6b00 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.922405] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27deca30-9b6f-4d0d-8387-ed5697667a31 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.929789] env[62585]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6068fb9d-f9d6-4227-9dfb-f28f71092ba2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.015955] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Downloading image file data 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 to the data store datastore2 {{(pid=62585) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 558.091622] env[62585]: DEBUG oslo_vmware.rw_handles [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a55de780-adab-4508-9d4f-a6be158f6240/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62585) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 558.210527] env[62585]: DEBUG nova.scheduler.client.report [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 558.667189] env[62585]: DEBUG nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 558.709068] env[62585]: DEBUG nova.virt.hardware [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 558.709068] env[62585]: DEBUG nova.virt.hardware [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 558.709487] env[62585]: DEBUG nova.virt.hardware [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 558.709855] env[62585]: DEBUG nova.virt.hardware [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 558.710095] env[62585]: DEBUG nova.virt.hardware [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Image pref 0:0:0 
{{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 558.710258] env[62585]: DEBUG nova.virt.hardware [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 558.710466] env[62585]: DEBUG nova.virt.hardware [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 558.710622] env[62585]: DEBUG nova.virt.hardware [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 558.710890] env[62585]: DEBUG nova.virt.hardware [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 558.710954] env[62585]: DEBUG nova.virt.hardware [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 558.712086] env[62585]: DEBUG nova.virt.hardware [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 558.712762] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b43901-2723-4ae2-8a29-43720fe55b42 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.722575] env[62585]: DEBUG oslo_concurrency.lockutils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.148s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.723793] env[62585]: ERROR nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f0854270-4feb-4756-a645-54b6d6320e21, please check neutron logs for more information. 
[ 558.723793] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Traceback (most recent call last): [ 558.723793] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 558.723793] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] self.driver.spawn(context, instance, image_meta, [ 558.723793] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 558.723793] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 558.723793] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 558.723793] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] vm_ref = self.build_virtual_machine(instance, [ 558.723793] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 558.723793] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 558.723793] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 558.724400] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] for vif in network_info: [ 558.724400] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 558.724400] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] return self._sync_wrapper(fn, *args, **kwargs) [ 558.724400] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 558.724400] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] self.wait() [ 558.724400] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 558.724400] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] self[:] = self._gt.wait() [ 558.724400] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 558.724400] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] return self._exit_event.wait() [ 558.724400] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 558.724400] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] result = hub.switch() [ 558.724400] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
558.724400] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] return self.greenlet.switch() [ 558.725177] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 558.725177] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] result = function(*args, **kwargs) [ 558.725177] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 558.725177] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] return func(*args, **kwargs) [ 558.725177] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 558.725177] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] raise e [ 558.725177] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 558.725177] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] nwinfo = self.network_api.allocate_for_instance( [ 558.725177] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 558.725177] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] created_port_ids = self._update_ports_for_instance( [ 558.725177] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 558.725177] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] with excutils.save_and_reraise_exception(): [ 558.725177] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 558.725599] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] self.force_reraise() [ 558.725599] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 558.725599] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] raise self.value [ 558.725599] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 558.725599] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] updated_port = self._update_port( [ 558.725599] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 558.725599] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] _ensure_no_port_binding_failure(port) [ 558.725599] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 558.725599] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] raise exception.PortBindingFailed(port_id=port['id']) [ 558.725599] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] nova.exception.PortBindingFailed: Binding failed for port f0854270-4feb-4756-a645-54b6d6320e21, please check neutron logs for more information. [ 558.725599] env[62585]: ERROR nova.compute.manager [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] [ 558.725933] env[62585]: DEBUG nova.compute.utils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Binding failed for port f0854270-4feb-4756-a645-54b6d6320e21, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 558.730055] env[62585]: DEBUG oslo_concurrency.lockutils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.146s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.736634] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd325320-5175-4ac6-b474-ea20966a812f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.741307] env[62585]: DEBUG nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Build of instance 106dca6d-1ddf-4315-b645-c52c7c59f5d1 was re-scheduled: Binding failed for port f0854270-4feb-4756-a645-54b6d6320e21, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 558.741725] env[62585]: DEBUG nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 558.742013] env[62585]: DEBUG oslo_concurrency.lockutils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Acquiring lock "refresh_cache-106dca6d-1ddf-4315-b645-c52c7c59f5d1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.742120] env[62585]: DEBUG oslo_concurrency.lockutils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Acquired lock "refresh_cache-106dca6d-1ddf-4315-b645-c52c7c59f5d1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.742280] env[62585]: DEBUG nova.network.neutron [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 558.796593] env[62585]: DEBUG oslo_vmware.rw_handles [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Completed reading data from the image iterator. {{(pid=62585) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 558.796593] env[62585]: DEBUG oslo_vmware.rw_handles [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a55de780-adab-4508-9d4f-a6be158f6240/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62585) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 558.938519] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Downloaded image file data 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 to vmware_temp/a55de780-adab-4508-9d4f-a6be158f6240/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk on the data store datastore2 {{(pid=62585) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 558.939883] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Caching image {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 558.940406] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Copying Virtual Disk [datastore2] vmware_temp/a55de780-adab-4508-9d4f-a6be158f6240/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk to [datastore2] vmware_temp/a55de780-adab-4508-9d4f-a6be158f6240/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 558.940855] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94f0392f-cd02-4520-a56a-dff70c5ded1a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.949030] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 558.949030] env[62585]: value = "task-1384603" [ 558.949030] env[62585]: _type = "Task" [ 558.949030] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.958971] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384603, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.268439] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Acquiring lock "1cad8d1b-ed02-424c-879c-2f23d4d90b22" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.268768] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Lock "1cad8d1b-ed02-424c-879c-2f23d4d90b22" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.270931] env[62585]: DEBUG nova.network.neutron [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 559.365728] env[62585]: DEBUG nova.compute.manager [req-09cedd00-86f2-42d9-b1a1-bad74170506e req-e7941ebd-aed1-4e21-9638-9e2d8f5507ac service nova] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Received event network-changed-bf826877-c169-4efd-a6cc-e1340c279cf4 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 559.365728] env[62585]: DEBUG nova.compute.manager [req-09cedd00-86f2-42d9-b1a1-bad74170506e req-e7941ebd-aed1-4e21-9638-9e2d8f5507ac service nova] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Refreshing instance network info cache due to event network-changed-bf826877-c169-4efd-a6cc-e1340c279cf4. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 559.365728] env[62585]: DEBUG oslo_concurrency.lockutils [req-09cedd00-86f2-42d9-b1a1-bad74170506e req-e7941ebd-aed1-4e21-9638-9e2d8f5507ac service nova] Acquiring lock "refresh_cache-1531ed40-29c2-4812-afd5-eabffe22f4ea" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.366801] env[62585]: DEBUG oslo_concurrency.lockutils [req-09cedd00-86f2-42d9-b1a1-bad74170506e req-e7941ebd-aed1-4e21-9638-9e2d8f5507ac service nova] Acquired lock "refresh_cache-1531ed40-29c2-4812-afd5-eabffe22f4ea" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.367252] env[62585]: DEBUG nova.network.neutron [req-09cedd00-86f2-42d9-b1a1-bad74170506e req-e7941ebd-aed1-4e21-9638-9e2d8f5507ac service nova] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Refreshing network info cache for port bf826877-c169-4efd-a6cc-e1340c279cf4 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 559.387504] env[62585]: DEBUG nova.network.neutron [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.465282] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384603, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.720486] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604451da-531f-4c92-aa2b-1f5c97082f6a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.729796] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d51b0dd-c6cb-43e8-b6bc-d0b21c6f3492 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.761109] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c51b15f-0e4b-4a08-91a1-692d71f0f083 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.768458] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a333328d-4fb7-49eb-998e-da8b6522fb95 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.782785] env[62585]: DEBUG nova.compute.provider_tree [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 559.887694] env[62585]: DEBUG nova.network.neutron [req-09cedd00-86f2-42d9-b1a1-bad74170506e req-e7941ebd-aed1-4e21-9638-9e2d8f5507ac service nova] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 559.890075] env[62585]: DEBUG oslo_concurrency.lockutils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Releasing lock "refresh_cache-106dca6d-1ddf-4315-b645-c52c7c59f5d1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.890293] env[62585]: DEBUG nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 559.890468] env[62585]: DEBUG nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 559.890625] env[62585]: DEBUG nova.network.neutron [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 559.892830] env[62585]: ERROR nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bf826877-c169-4efd-a6cc-e1340c279cf4, please check neutron logs for more information. 
[ 559.892830] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 559.892830] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 559.892830] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 559.892830] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 559.892830] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 559.892830] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 559.892830] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 559.892830] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 559.892830] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 559.892830] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 559.892830] env[62585]: ERROR nova.compute.manager raise self.value [ 559.892830] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 559.892830] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 559.892830] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 559.892830] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 559.893681] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 559.893681] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 559.893681] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bf826877-c169-4efd-a6cc-e1340c279cf4, please check neutron logs for more information. 
[ 559.893681] env[62585]: ERROR nova.compute.manager [ 559.893681] env[62585]: Traceback (most recent call last): [ 559.893681] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 559.893681] env[62585]: listener.cb(fileno) [ 559.893681] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 559.893681] env[62585]: result = function(*args, **kwargs) [ 559.893681] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 559.893681] env[62585]: return func(*args, **kwargs) [ 559.893681] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 559.893681] env[62585]: raise e [ 559.893681] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 559.893681] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 559.893681] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 559.893681] env[62585]: created_port_ids = self._update_ports_for_instance( [ 559.893681] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 559.893681] env[62585]: with excutils.save_and_reraise_exception(): [ 559.893681] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 559.893681] env[62585]: self.force_reraise() [ 559.893681] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 559.893681] env[62585]: raise self.value [ 559.893681] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 559.893681] env[62585]: updated_port = self._update_port( [ 559.893681] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 559.893681] env[62585]: _ensure_no_port_binding_failure(port) [ 559.893681] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 559.893681] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 559.895552] env[62585]: nova.exception.PortBindingFailed: Binding failed for port bf826877-c169-4efd-a6cc-e1340c279cf4, please check neutron logs for more information. [ 559.895552] env[62585]: Removing descriptor: 15 [ 559.895552] env[62585]: ERROR nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bf826877-c169-4efd-a6cc-e1340c279cf4, please check neutron logs for more information. 
[ 559.895552] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Traceback (most recent call last): [ 559.895552] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 559.895552] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] yield resources [ 559.895552] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 559.895552] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] self.driver.spawn(context, instance, image_meta, [ 559.895552] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 559.895552] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] self._vmops.spawn(context, instance, image_meta, injected_files, [ 559.895552] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 559.895552] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] vm_ref = self.build_virtual_machine(instance, [ 559.896686] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 559.896686] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] vif_infos = vmwarevif.get_vif_info(self._session, [ 559.896686] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 559.896686] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] for vif in network_info: [ 559.896686] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 559.896686] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] return self._sync_wrapper(fn, *args, **kwargs) [ 559.896686] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 559.896686] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] self.wait() [ 559.896686] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 559.896686] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] self[:] = self._gt.wait() [ 559.896686] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 559.896686] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] return self._exit_event.wait() [ 559.896686] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 559.897354] env[62585]: ERROR 
nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] result = hub.switch() [ 559.897354] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 559.897354] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] return self.greenlet.switch() [ 559.897354] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 559.897354] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] result = function(*args, **kwargs) [ 559.897354] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 559.897354] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] return func(*args, **kwargs) [ 559.897354] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 559.897354] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] raise e [ 559.897354] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 559.897354] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] nwinfo = self.network_api.allocate_for_instance( [ 559.897354] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 559.897354] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] created_port_ids = self._update_ports_for_instance( [ 559.900363] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 559.900363] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] with excutils.save_and_reraise_exception(): [ 559.900363] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 559.900363] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] self.force_reraise() [ 559.900363] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 559.900363] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] raise self.value [ 559.900363] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 559.900363] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] updated_port = self._update_port( [ 559.900363] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 559.900363] 
env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] _ensure_no_port_binding_failure(port) [ 559.900363] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 559.900363] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] raise exception.PortBindingFailed(port_id=port['id']) [ 559.900710] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] nova.exception.PortBindingFailed: Binding failed for port bf826877-c169-4efd-a6cc-e1340c279cf4, please check neutron logs for more information. [ 559.900710] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] [ 559.900710] env[62585]: INFO nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Terminating instance [ 559.900710] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Acquiring lock "refresh_cache-1531ed40-29c2-4812-afd5-eabffe22f4ea" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.917496] env[62585]: DEBUG nova.network.neutron [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 559.960248] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384603, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.655189} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.960248] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Copied Virtual Disk [datastore2] vmware_temp/a55de780-adab-4508-9d4f-a6be158f6240/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk to [datastore2] vmware_temp/a55de780-adab-4508-9d4f-a6be158f6240/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 559.960435] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Deleting the datastore file [datastore2] vmware_temp/a55de780-adab-4508-9d4f-a6be158f6240/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/tmp-sparse.vmdk {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 559.960781] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4060434b-c756-469b-a68e-bf9ccf12e7d6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.970152] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 559.970152] env[62585]: value = "task-1384604" [ 559.970152] env[62585]: _type = "Task" [ 559.970152] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.978494] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384604, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.036577] env[62585]: DEBUG nova.network.neutron [req-09cedd00-86f2-42d9-b1a1-bad74170506e req-e7941ebd-aed1-4e21-9638-9e2d8f5507ac service nova] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.288020] env[62585]: DEBUG nova.scheduler.client.report [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 560.420819] env[62585]: DEBUG nova.network.neutron [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.477417] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384604, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025412} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.477663] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 560.477867] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Moving file from [datastore2] vmware_temp/a55de780-adab-4508-9d4f-a6be158f6240/790c072e-fdf9-43ec-b7a5-3b21a2eaee40 to [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40. {{(pid=62585) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 560.478127] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-c7c54048-b731-4849-9da4-9169992558f3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.485135] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 560.485135] env[62585]: value = "task-1384605" [ 560.485135] env[62585]: _type = "Task" [ 560.485135] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.492788] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384605, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.537834] env[62585]: DEBUG oslo_concurrency.lockutils [req-09cedd00-86f2-42d9-b1a1-bad74170506e req-e7941ebd-aed1-4e21-9638-9e2d8f5507ac service nova] Releasing lock "refresh_cache-1531ed40-29c2-4812-afd5-eabffe22f4ea" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.539085] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Acquired lock "refresh_cache-1531ed40-29c2-4812-afd5-eabffe22f4ea" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.539085] env[62585]: DEBUG nova.network.neutron [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 560.791323] env[62585]: DEBUG oslo_concurrency.lockutils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.061s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.791975] env[62585]: ERROR nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f650ce79-b0d6-4a2c-82dd-cfd77974d2f7, please check neutron logs for more information. 
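This failure, like the earlier ones for ports f0854270-4feb-4756-a645-54b6d6320e21 and bf826877-c169-4efd-a6cc-e1340c279cf4, is raised by _ensure_no_port_binding_failure in nova/network/neutron.py (line 294 in the tracebacks) once Neutron returns a port whose binding failed. Below is a minimal standalone sketch of that check, with Nova's exception class and VIF-type constant stubbed out so it runs on its own; it is an approximation, not the exact upstream code.

# Standalone sketch of the check seen in the tracebacks above.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

VIF_TYPE_BINDING_FAILED = 'binding_failed'   # stub for Nova's network model constant

def ensure_no_port_binding_failure(port):
    # Neutron marks a port it could not bind with binding:vif_type =
    # 'binding_failed'; Nova turns that into PortBindingFailed, which aborts
    # the spawn and triggers the re-schedule seen in this log.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

try:
    ensure_no_port_binding_failure(
        {'id': 'f650ce79-b0d6-4a2c-82dd-cfd77974d2f7',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)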
[ 560.791975] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Traceback (most recent call last): [ 560.791975] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 560.791975] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] self.driver.spawn(context, instance, image_meta, [ 560.791975] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 560.791975] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 560.791975] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 560.791975] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] vm_ref = self.build_virtual_machine(instance, [ 560.791975] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 560.791975] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] vif_infos = vmwarevif.get_vif_info(self._session, [ 560.791975] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 560.792422] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] for vif in network_info: [ 560.792422] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 560.792422] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] return self._sync_wrapper(fn, *args, **kwargs) [ 560.792422] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 560.792422] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] self.wait() [ 560.792422] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 560.792422] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] self[:] = self._gt.wait() [ 560.792422] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 560.792422] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] return self._exit_event.wait() [ 560.792422] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 560.792422] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] result = hub.switch() [ 560.792422] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
560.792422] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] return self.greenlet.switch() [ 560.792852] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 560.792852] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] result = function(*args, **kwargs) [ 560.792852] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 560.792852] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] return func(*args, **kwargs) [ 560.792852] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 560.792852] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] raise e [ 560.792852] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 560.792852] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] nwinfo = self.network_api.allocate_for_instance( [ 560.792852] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 560.792852] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] created_port_ids = self._update_ports_for_instance( [ 560.792852] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 560.792852] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] with excutils.save_and_reraise_exception(): [ 560.792852] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 560.793285] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] self.force_reraise() [ 560.793285] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 560.793285] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] raise self.value [ 560.793285] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 560.793285] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] updated_port = self._update_port( [ 560.793285] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 560.793285] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] _ensure_no_port_binding_failure(port) [ 560.793285] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 560.793285] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] raise exception.PortBindingFailed(port_id=port['id']) [ 560.793285] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] nova.exception.PortBindingFailed: Binding failed for port f650ce79-b0d6-4a2c-82dd-cfd77974d2f7, please check neutron logs for more information. [ 560.793285] env[62585]: ERROR nova.compute.manager [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] [ 560.793706] env[62585]: DEBUG nova.compute.utils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Binding failed for port f650ce79-b0d6-4a2c-82dd-cfd77974d2f7, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 560.795183] env[62585]: DEBUG oslo_concurrency.lockutils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.399s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.798012] env[62585]: DEBUG nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Build of instance 598c7b4f-8239-45af-8bc5-caf6b47172ba was re-scheduled: Binding failed for port f650ce79-b0d6-4a2c-82dd-cfd77974d2f7, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 560.798633] env[62585]: DEBUG nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 560.798875] env[62585]: DEBUG oslo_concurrency.lockutils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Acquiring lock "refresh_cache-598c7b4f-8239-45af-8bc5-caf6b47172ba" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.799037] env[62585]: DEBUG oslo_concurrency.lockutils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Acquired lock "refresh_cache-598c7b4f-8239-45af-8bc5-caf6b47172ba" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.799261] env[62585]: DEBUG nova.network.neutron [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 560.923557] env[62585]: INFO nova.compute.manager [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] [instance: 106dca6d-1ddf-4315-b645-c52c7c59f5d1] Took 1.03 seconds to deallocate network for instance. [ 560.998533] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384605, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.135507} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.998533] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] File moved {{(pid=62585) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 560.998533] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Cleaning up location [datastore2] vmware_temp/a55de780-adab-4508-9d4f-a6be158f6240 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 560.998533] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Deleting the datastore file [datastore2] vmware_temp/a55de780-adab-4508-9d4f-a6be158f6240 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 560.998533] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bbbb5c91-e485-47b1-94bc-94375b6d9d80 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.006969] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 561.006969] env[62585]: value = "task-1384606" [ 561.006969] env[62585]: _type = "Task" [ 561.006969] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.014893] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384606, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.057280] env[62585]: DEBUG nova.network.neutron [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 561.221019] env[62585]: DEBUG nova.network.neutron [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.330795] env[62585]: DEBUG nova.network.neutron [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 561.445110] env[62585]: DEBUG nova.compute.manager [req-143f3624-0504-4b53-8210-26b531e5878c req-730726e6-21f7-4341-93bb-be858e8c6d22 service nova] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Received event network-vif-deleted-bf826877-c169-4efd-a6cc-e1340c279cf4 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 561.450315] env[62585]: DEBUG nova.network.neutron [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.518820] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384606, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02378} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.521513] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 561.523055] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b7d1d76-a719-47d5-b169-f2e033f5b9ba {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.527929] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 561.527929] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]529329ae-ff2c-9d5a-468d-7d5622dd92f3" [ 561.527929] env[62585]: _type = "Task" [ 561.527929] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.536451] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]529329ae-ff2c-9d5a-468d-7d5622dd92f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.720931] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Releasing lock "refresh_cache-1531ed40-29c2-4812-afd5-eabffe22f4ea" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.721368] env[62585]: DEBUG nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 561.721555] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 561.721932] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e1e00e90-8226-488c-8244-3f86f33c67b6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.737670] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc427d2f-d3a2-4757-aa54-83040f0bf3bc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.752983] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c133fe5-b985-452b-9190-66661a3e3bdf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.760298] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94c40cf-d905-4b57-bc33-0d850aec08e1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.768215] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1531ed40-29c2-4812-afd5-eabffe22f4ea could not be found. [ 561.768429] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 561.768606] env[62585]: INFO nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Took 0.05 seconds to destroy the instance on the hypervisor. [ 561.769252] env[62585]: DEBUG oslo.service.loopingcall [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 561.795754] env[62585]: DEBUG nova.compute.manager [-] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 561.795860] env[62585]: DEBUG nova.network.neutron [-] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 561.798249] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb25408-6e45-4eb9-a187-332482d6e685 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.805946] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ec812a-4e1b-450d-afd0-6733f145dcbf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.821023] env[62585]: DEBUG nova.compute.provider_tree [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 561.821689] env[62585]: DEBUG nova.network.neutron [-] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 561.960952] env[62585]: DEBUG oslo_concurrency.lockutils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Releasing lock "refresh_cache-598c7b4f-8239-45af-8bc5-caf6b47172ba" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.961141] env[62585]: DEBUG nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 561.961346] env[62585]: DEBUG nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 561.961974] env[62585]: DEBUG nova.network.neutron [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 561.981692] env[62585]: INFO nova.scheduler.client.report [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Deleted allocations for instance 106dca6d-1ddf-4315-b645-c52c7c59f5d1 [ 561.991481] env[62585]: DEBUG nova.network.neutron [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 562.039989] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]529329ae-ff2c-9d5a-468d-7d5622dd92f3, 'name': SearchDatastore_Task, 'duration_secs': 0.009102} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.040302] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.040554] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] dd387320-7101-440c-80bc-a7d19a654df8/dd387320-7101-440c-80bc-a7d19a654df8.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 562.040802] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49c0ef17-0ad4-4519-8d12-3f9ff52e7e44 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.048246] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 562.048246] env[62585]: value = "task-1384607" [ 562.048246] env[62585]: _type = "Task" [ 562.048246] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.061202] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384607, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.325161] env[62585]: DEBUG nova.scheduler.client.report [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 562.328683] env[62585]: DEBUG nova.network.neutron [-] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.489741] env[62585]: DEBUG oslo_concurrency.lockutils [None req-00502d2c-9726-4c36-9c65-89eb792983b8 tempest-ServerTagsTestJSON-746744466 tempest-ServerTagsTestJSON-746744466-project-member] Lock "106dca6d-1ddf-4315-b645-c52c7c59f5d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.886s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.497036] env[62585]: DEBUG nova.network.neutron [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.562829] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384607, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482003} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.563124] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] dd387320-7101-440c-80bc-a7d19a654df8/dd387320-7101-440c-80bc-a7d19a654df8.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 562.563430] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 562.563731] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b4105565-536b-44ff-96c4-10d434a03a7d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.573352] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 562.573352] env[62585]: value = "task-1384608" [ 562.573352] env[62585]: _type = "Task" [ 562.573352] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.587050] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384608, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.833033] env[62585]: DEBUG oslo_concurrency.lockutils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.037s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.833169] env[62585]: ERROR nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 697d8534-47a6-44ae-8467-6ab6f6378b6f, please check neutron logs for more information. 
[ 562.833169] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Traceback (most recent call last): [ 562.833169] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 562.833169] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] self.driver.spawn(context, instance, image_meta, [ 562.833169] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 562.833169] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 562.833169] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 562.833169] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] vm_ref = self.build_virtual_machine(instance, [ 562.833169] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 562.833169] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] vif_infos = vmwarevif.get_vif_info(self._session, [ 562.833169] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 562.834724] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] for vif in network_info: [ 562.834724] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 562.834724] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] return self._sync_wrapper(fn, *args, **kwargs) [ 562.834724] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 562.834724] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] self.wait() [ 562.834724] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 562.834724] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] self[:] = self._gt.wait() [ 562.834724] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 562.834724] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] return self._exit_event.wait() [ 562.834724] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 562.834724] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] result = hub.switch() [ 562.834724] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
562.834724] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] return self.greenlet.switch() [ 562.835107] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 562.835107] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] result = function(*args, **kwargs) [ 562.835107] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 562.835107] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] return func(*args, **kwargs) [ 562.835107] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 562.835107] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] raise e [ 562.835107] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 562.835107] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] nwinfo = self.network_api.allocate_for_instance( [ 562.835107] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 562.835107] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] created_port_ids = self._update_ports_for_instance( [ 562.835107] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 562.835107] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] with excutils.save_and_reraise_exception(): [ 562.835107] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 562.835484] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] self.force_reraise() [ 562.835484] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 562.835484] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] raise self.value [ 562.835484] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 562.835484] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] updated_port = self._update_port( [ 562.835484] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 562.835484] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] _ensure_no_port_binding_failure(port) [ 562.835484] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 562.835484] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] raise exception.PortBindingFailed(port_id=port['id']) [ 562.835484] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] nova.exception.PortBindingFailed: Binding failed for port 697d8534-47a6-44ae-8467-6ab6f6378b6f, please check neutron logs for more information. [ 562.835484] env[62585]: ERROR nova.compute.manager [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] [ 562.835794] env[62585]: DEBUG nova.compute.utils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Binding failed for port 697d8534-47a6-44ae-8467-6ab6f6378b6f, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 562.836096] env[62585]: INFO nova.compute.manager [-] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Took 1.04 seconds to deallocate network for instance. [ 562.836621] env[62585]: DEBUG nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Build of instance 3d05d741-1b46-4646-8269-f72dc6ad5cbd was re-scheduled: Binding failed for port 697d8534-47a6-44ae-8467-6ab6f6378b6f, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 562.837075] env[62585]: DEBUG nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 562.837310] env[62585]: DEBUG oslo_concurrency.lockutils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Acquiring lock "refresh_cache-3d05d741-1b46-4646-8269-f72dc6ad5cbd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.837446] env[62585]: DEBUG oslo_concurrency.lockutils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Acquired lock "refresh_cache-3d05d741-1b46-4646-8269-f72dc6ad5cbd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.837595] env[62585]: DEBUG nova.network.neutron [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 562.838589] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.393s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.840042] env[62585]: INFO nova.compute.claims [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 562.854546] env[62585]: DEBUG nova.compute.claims [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 562.854731] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.883807] env[62585]: DEBUG nova.network.neutron [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 562.966754] env[62585]: DEBUG nova.network.neutron [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.994711] env[62585]: DEBUG nova.compute.manager [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 563.001584] env[62585]: INFO nova.compute.manager [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] [instance: 598c7b4f-8239-45af-8bc5-caf6b47172ba] Took 1.04 seconds to deallocate network for instance. [ 563.087046] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384608, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069719} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.087383] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 563.089387] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc2f3dd-5204-414a-9ae9-0fce37a90041 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.109704] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] dd387320-7101-440c-80bc-a7d19a654df8/dd387320-7101-440c-80bc-a7d19a654df8.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 563.110327] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9dc1cfa5-87a2-4b7f-8ce8-d87bfc43c080 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.133044] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 563.133044] env[62585]: value = "task-1384609" [ 563.133044] env[62585]: _type = "Task" [ 563.133044] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.142257] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384609, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.470057] env[62585]: DEBUG oslo_concurrency.lockutils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Releasing lock "refresh_cache-3d05d741-1b46-4646-8269-f72dc6ad5cbd" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.472503] env[62585]: DEBUG nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 563.472503] env[62585]: DEBUG nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 563.472503] env[62585]: DEBUG nova.network.neutron [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 563.487707] env[62585]: DEBUG nova.network.neutron [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 563.522480] env[62585]: DEBUG oslo_concurrency.lockutils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.644752] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384609, 'name': ReconfigVM_Task, 'duration_secs': 0.260988} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.644752] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Reconfigured VM instance instance-0000000b to attach disk [datastore2] dd387320-7101-440c-80bc-a7d19a654df8/dd387320-7101-440c-80bc-a7d19a654df8.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 563.645134] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fe1062d3-3423-4141-b97f-82425438554b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.651849] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 563.651849] env[62585]: value = "task-1384610" [ 563.651849] env[62585]: _type = "Task" [ 563.651849] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.660473] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384610, 'name': Rename_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.937907] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Acquiring lock "ed0ec962-3c4e-409f-9332-0a79ca1c6ed3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.938177] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Lock "ed0ec962-3c4e-409f-9332-0a79ca1c6ed3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.990832] env[62585]: DEBUG nova.network.neutron [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.040531] env[62585]: INFO nova.scheduler.client.report [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Deleted allocations for instance 598c7b4f-8239-45af-8bc5-caf6b47172ba [ 564.167889] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384610, 'name': Rename_Task, 'duration_secs': 0.132385} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.170947] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 564.172554] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-817b51af-7943-4a91-a49d-ccb68dbdec85 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.180580] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 564.180580] env[62585]: value = "task-1384611" [ 564.180580] env[62585]: _type = "Task" [ 564.180580] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.318098] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Acquiring lock "ae66c3e2-eac8-4239-b5be-64dc0dcf2c04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.318345] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Lock "ae66c3e2-eac8-4239-b5be-64dc0dcf2c04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.388018] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e2deca-3a38-4056-826e-5e3dd191220e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.396441] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4dd6b7b-586c-46d4-b861-78e72c557ac4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.428277] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbeda762-9bb8-4ba8-930f-3f2691275728 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.437836] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843b867a-57b1-4baf-8126-10537d0768d2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.456148] env[62585]: DEBUG nova.compute.provider_tree [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 564.494457] env[62585]: INFO nova.compute.manager [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] [instance: 3d05d741-1b46-4646-8269-f72dc6ad5cbd] Took 1.02 seconds to deallocate network for instance. [ 564.555861] env[62585]: DEBUG oslo_concurrency.lockutils [None req-54cc4cdf-ba31-4759-b310-b0a70f9d6549 tempest-ServersAdminTestJSON-1845719341 tempest-ServersAdminTestJSON-1845719341-project-member] Lock "598c7b4f-8239-45af-8bc5-caf6b47172ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.934s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.689923] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384611, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.959926] env[62585]: DEBUG nova.scheduler.client.report [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 565.058973] env[62585]: DEBUG nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 565.194026] env[62585]: DEBUG oslo_vmware.api [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384611, 'name': PowerOnVM_Task, 'duration_secs': 0.521448} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.194442] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 565.194532] env[62585]: INFO nova.compute.manager [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Took 9.09 seconds to spawn the instance on the hypervisor. 
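Note on the entries above: the spawn of instance dd387320-7101-440c-80bc-a7d19a654df8 runs as a chain of vCenter tasks (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each awaited by polling its progress until it completes. The following is a minimal, self-contained sketch of that polling pattern only; the function and attribute names are assumptions for illustration and are not oslo.vmware's actual API.

import time

class TaskFailed(Exception):
    """Raised when a polled task finishes in an error state (hypothetical)."""

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    # get_task_info is a caller-supplied callable returning an object with
    # .state ('running' | 'success' | 'error'), .progress and .error -- names
    # assumed for this sketch, not taken from oslo.vmware.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise TaskFailed(info.error)
        # Corresponds to the "... progress is N%" DEBUG lines seen while waiting.
        print("Task progress is %s%%" % info.progress)
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete within %s seconds" % timeout)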
[ 565.194671] env[62585]: DEBUG nova.compute.manager [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 565.195467] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6928448-e6b9-4963-bf4e-8ad96c8697b2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.465738] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.627s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.466274] env[62585]: DEBUG nova.compute.manager [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 565.469342] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.017s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.470920] env[62585]: INFO nova.compute.claims [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 565.532044] env[62585]: INFO nova.scheduler.client.report [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Deleted allocations for instance 3d05d741-1b46-4646-8269-f72dc6ad5cbd [ 565.582703] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.716792] env[62585]: INFO nova.compute.manager [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Took 36.36 seconds to build instance. 
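Note on the PortBindingFailed traceback earlier in this section: the failure is raised by a check on the updated Neutron port before its network_info is consumed by the VMware VIF code. A simplified sketch of that kind of check follows; 'binding:vif_type' is the standard Neutron port attribute, and the 'binding_failed' value compared against here is an assumption for illustration rather than a quote of Nova's constant.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)
        self.port_id = port_id

def ensure_no_port_binding_failure(port):
    # port is the dict returned by the Neutron API for the updated port.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port['id'])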
[ 565.971144] env[62585]: DEBUG nova.compute.utils [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 565.974675] env[62585]: DEBUG nova.compute.manager [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Not allocating networking since 'none' was specified. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 566.040534] env[62585]: DEBUG oslo_concurrency.lockutils [None req-49354887-0bf0-4fe3-a2e6-14ff79bb9938 tempest-ServerDiagnosticsTest-1315872934 tempest-ServerDiagnosticsTest-1315872934-project-member] Lock "3d05d741-1b46-4646-8269-f72dc6ad5cbd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.106s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.151995] env[62585]: INFO nova.compute.manager [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Rebuilding instance [ 566.215070] env[62585]: DEBUG nova.compute.manager [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 566.215865] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f25cb5-bf0d-445b-8451-cb06e6d4cd74 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.219414] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5bd681c-baa2-4da6-9905-0720c1f294d0 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Lock "dd387320-7101-440c-80bc-a7d19a654df8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.690s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.476761] env[62585]: DEBUG nova.compute.manager [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 566.544951] env[62585]: DEBUG nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 566.722809] env[62585]: DEBUG nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 566.728693] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 566.730031] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-731a9386-d3ca-4293-9263-76e5b5ae0a67 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.736067] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 566.736067] env[62585]: value = "task-1384612" [ 566.736067] env[62585]: _type = "Task" [ 566.736067] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.744683] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384612, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.914754] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03dbe771-1983-48a0-8ff1-b9d3b6bc362b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.922727] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c509f47d-a0f3-4e77-9013-eec18575f71f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.955089] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d757584-3031-4a8b-9356-c1e0d89c76f0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.964366] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ecb748-3a5b-4232-9855-7c2c80306e36 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.977523] env[62585]: DEBUG nova.compute.provider_tree [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 567.074330] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.247959] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b 
tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384612, 'name': PowerOffVM_Task, 'duration_secs': 0.118112} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.248985] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 567.249491] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 567.254751] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b0edc8-f2b6-4d09-bb1f-a3eeb2f83305 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.263474] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 567.264355] env[62585]: DEBUG oslo_concurrency.lockutils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.264591] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fb30b81c-ed29-4672-acfa-f1c5aa4efce6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.295940] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 567.296470] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 567.296663] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Deleting the datastore file [datastore2] dd387320-7101-440c-80bc-a7d19a654df8 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 567.297037] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c69e96ad-bdd2-4317-bd3d-8b69c23c7756 {{(pid=62585) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.305856] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 567.305856] env[62585]: value = "task-1384614" [ 567.305856] env[62585]: _type = "Task" [ 567.305856] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.314355] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384614, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.483903] env[62585]: DEBUG nova.scheduler.client.report [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 567.493117] env[62585]: DEBUG nova.compute.manager [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 567.533172] env[62585]: DEBUG nova.virt.hardware [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 567.533432] env[62585]: DEBUG nova.virt.hardware [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 567.533613] env[62585]: DEBUG nova.virt.hardware [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.533823] env[62585]: DEBUG nova.virt.hardware [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 567.533964] env[62585]: DEBUG nova.virt.hardware [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.534198] env[62585]: DEBUG nova.virt.hardware [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 567.534361] env[62585]: DEBUG nova.virt.hardware [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 567.534520] env[62585]: DEBUG nova.virt.hardware [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 567.534731] env[62585]: DEBUG nova.virt.hardware [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 
tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 567.535190] env[62585]: DEBUG nova.virt.hardware [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 567.535296] env[62585]: DEBUG nova.virt.hardware [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 567.537299] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5476296-c456-444c-a312-45030dae8254 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.547819] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c032e33-4302-4958-9b72-3630c10dd403 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.563528] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Instance VIF info [] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 567.570298] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Creating folder: Project (04f73661b7d24132bf6a5bc88543ee27). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 567.570298] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-034f616f-4648-43a9-8e24-e3dbe9465602 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.580414] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Created folder: Project (04f73661b7d24132bf6a5bc88543ee27) in parent group-v293962. [ 567.580636] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Creating folder: Instances. Parent ref: group-v293973. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 567.580836] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bd44b1bb-bdbd-4593-a475-8825ea4275eb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.589411] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Created folder: Instances in parent group-v293973. 
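
The Folder.CreateFolder, CreateVM_Task, PowerOffVM_Task and DeleteDatastoreFile_Task entries in this stretch of the log all follow the same oslo.vmware pattern: invoke a vSphere *_Task method through the API session, then poll the returned task until it finishes (the "Waiting for the task" lines come from wait_for_task at oslo_vmware/api.py:397 and the "progress is N%" lines from _poll_task at api.py:434, as the trailers show). Below is a minimal sketch of that pattern using oslo.vmware's public session API; the vCenter host, credentials and the vm_ref managed-object reference are placeholders, and Nova itself drives these calls through its vm_util/vmops wrappers rather than calling invoke_api directly.

    # Minimal sketch of the oslo.vmware invoke/wait pattern seen in the log above.
    # Host, credentials and vm_ref are placeholders, not values from this deployment.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.com',   # placeholder vCenter host
        'administrator',         # placeholder user name
        'secret',                # placeholder password
        10,                      # api_retry_count
        0.5)                     # task_poll_interval: seconds between progress polls

    def power_off(session, vm_ref):
        # Invoking a *_Task method returns a task managed-object reference;
        # this is the call that shows up as "Invoking VirtualMachine.PowerOffVM_Task".
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task polls the task (the "progress is N%" lines) and raises on
        # failure, returning the task info once it completes successfully.
        return session.wait_for_task(task)

The same invoke/wait shape covers the folder creation, VM creation, datastore file deletion and virtual-disk copy tasks that appear later in this section; only the method name and its arguments change.
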
[ 567.589715] env[62585]: DEBUG oslo.service.loopingcall [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 567.589811] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 567.590054] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f66fe45-2aff-403c-aba1-4ac108375f95 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.611923] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 567.611923] env[62585]: value = "task-1384617" [ 567.611923] env[62585]: _type = "Task" [ 567.611923] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.625790] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384617, 'name': CreateVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.815887] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384614, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104186} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.816248] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 567.816392] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 567.816565] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 567.998236] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.528s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.998440] env[62585]: DEBUG nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Start building networks 
asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 568.005987] env[62585]: DEBUG oslo_concurrency.lockutils [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.319s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.005987] env[62585]: DEBUG nova.objects.instance [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Lazy-loading 'resources' on Instance uuid 779efd7e-99d5-4065-8ade-1665533677a4 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 568.122701] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384617, 'name': CreateVM_Task, 'duration_secs': 0.325585} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.123115] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 568.123667] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.123847] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.124272] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 568.124416] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baa2e571-0e69-4c3d-8aaa-474a35bd8d19 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.130132] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 568.130132] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5221c955-9f0a-f730-5e7e-9ac53e3a7463" [ 568.130132] env[62585]: _type = "Task" [ 568.130132] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.138139] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5221c955-9f0a-f730-5e7e-9ac53e3a7463, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.511225] env[62585]: DEBUG nova.compute.utils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 568.517492] env[62585]: DEBUG nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 568.517492] env[62585]: DEBUG nova.network.neutron [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 568.567926] env[62585]: DEBUG nova.policy [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4799c792766f4d3ab6eb5625ff2210bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6846a29cd93d44c495c491f4287fefce', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 568.645427] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5221c955-9f0a-f730-5e7e-9ac53e3a7463, 'name': SearchDatastore_Task, 'duration_secs': 0.011231} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.645807] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.646097] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 568.646402] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.646854] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.647416] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 568.650538] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e5bd02b-70ea-40b4-821b-83c795680a82 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.659569] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 568.659791] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 568.660432] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-906b9b33-b8ad-4fc6-a46e-73707180530a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.669285] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 568.669285] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52480926-604a-236b-dc79-ea455e2a3065" [ 568.669285] env[62585]: _type = "Task" [ 568.669285] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.680163] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52480926-604a-236b-dc79-ea455e2a3065, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.866968] env[62585]: DEBUG nova.virt.hardware [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 568.867295] env[62585]: DEBUG nova.virt.hardware [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 568.867359] env[62585]: DEBUG nova.virt.hardware [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 568.867533] env[62585]: DEBUG nova.virt.hardware [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 568.867677] env[62585]: DEBUG nova.virt.hardware [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 568.867819] env[62585]: DEBUG nova.virt.hardware [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 568.868037] env[62585]: DEBUG nova.virt.hardware [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 568.868203] env[62585]: DEBUG nova.virt.hardware [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 568.868364] env[62585]: DEBUG nova.virt.hardware [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 568.868522] env[62585]: DEBUG nova.virt.hardware [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 568.868686] env[62585]: DEBUG nova.virt.hardware [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 568.869544] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6897cbc-9cc8-466a-aa62-004ca729e36d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.881115] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b486c805-e968-40cb-b5f8-1ab144e5ba42 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.893720] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Instance VIF info [] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 568.904486] env[62585]: DEBUG oslo.service.loopingcall [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 568.908278] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 568.908278] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56a3f370-4bc3-48b1-95ee-a0c0d2b3c032 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.925564] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 568.925564] env[62585]: value = "task-1384618" [ 568.925564] env[62585]: _type = "Task" [ 568.925564] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.935502] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384618, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.969102] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82823b8b-67cb-4b18-9d41-09c3a4dd063a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.979825] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e86a643-212a-4e3c-a1d2-6b28c183db0f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.019416] env[62585]: DEBUG nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 569.023049] env[62585]: DEBUG nova.network.neutron [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Successfully created port: f88a1b20-5c93-481f-ac6a-b74c531713fc {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 569.025937] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36b4c0f-1034-4d29-8ca2-e26a7ee373c1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.034339] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28259add-30b0-42d9-9548-09ae159b04cf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.054046] env[62585]: DEBUG nova.compute.provider_tree [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 569.182229] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52480926-604a-236b-dc79-ea455e2a3065, 'name': SearchDatastore_Task, 'duration_secs': 0.026266} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.183237] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e9a0a6d-69e7-4f29-9966-29fc81597047 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.190863] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 569.190863] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527329dd-511d-11be-374c-c0e9da9d1970" [ 569.190863] env[62585]: _type = "Task" [ 569.190863] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.201922] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527329dd-511d-11be-374c-c0e9da9d1970, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.436814] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384618, 'name': CreateVM_Task, 'duration_secs': 0.295171} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.437043] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 569.437419] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.437626] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.438098] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 569.438495] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47e264fa-b305-4d47-bf78-3f6aa69eaf20 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.443128] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 569.443128] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5226363f-a396-e12d-d2f5-350ac4e5164c" [ 569.443128] env[62585]: _type = "Task" [ 569.443128] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.451900] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5226363f-a396-e12d-d2f5-350ac4e5164c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.558073] env[62585]: DEBUG nova.scheduler.client.report [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 569.702983] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527329dd-511d-11be-374c-c0e9da9d1970, 'name': SearchDatastore_Task, 'duration_secs': 0.010679} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.703618] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.704033] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 149bd77b-9583-42e5-8c82-f795cac53b87/149bd77b-9583-42e5-8c82-f795cac53b87.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 569.704753] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5426aaf5-5237-4c1c-96e2-a68e7bfea132 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.713067] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 569.713067] env[62585]: value = "task-1384619" [ 569.713067] env[62585]: _type = "Task" [ 569.713067] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.722070] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384619, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.955240] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5226363f-a396-e12d-d2f5-350ac4e5164c, 'name': SearchDatastore_Task, 'duration_secs': 0.009124} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.956068] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.956471] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 569.956846] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.957139] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.957462] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 569.957842] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9accbc4f-0011-499d-89a4-d0eb38b6bbdc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.974033] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 569.974033] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 569.974033] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f6ca9f2-a4a8-4c99-8a97-224656107fb5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.980718] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 569.980718] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c4472c-10ff-d89c-224e-a39f8a2b54ec" [ 569.980718] env[62585]: _type = "Task" [ 569.980718] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.991014] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c4472c-10ff-d89c-224e-a39f8a2b54ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.037640] env[62585]: DEBUG nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 570.068440] env[62585]: DEBUG nova.virt.hardware [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:59:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='2139430005',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1915196175',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 570.068651] env[62585]: DEBUG nova.virt.hardware [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 570.068838] env[62585]: DEBUG nova.virt.hardware [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 570.069813] env[62585]: 
DEBUG nova.virt.hardware [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 570.070314] env[62585]: DEBUG nova.virt.hardware [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 570.070392] env[62585]: DEBUG nova.virt.hardware [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 570.070690] env[62585]: DEBUG nova.virt.hardware [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 570.070929] env[62585]: DEBUG nova.virt.hardware [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 570.071207] env[62585]: DEBUG nova.virt.hardware [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 570.071588] env[62585]: DEBUG nova.virt.hardware [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 570.071711] env[62585]: DEBUG nova.virt.hardware [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 570.073160] env[62585]: DEBUG oslo_concurrency.lockutils [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.068s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.076161] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c81614-631d-4afb-9247-b5de5ae7e20f {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.079506] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.850s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.080946] env[62585]: INFO nova.compute.claims [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 570.091493] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c492a7-8d3d-4d9d-ba44-98642af3e177 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.132105] env[62585]: INFO nova.scheduler.client.report [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Deleted allocations for instance 779efd7e-99d5-4065-8ade-1665533677a4 [ 570.226871] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384619, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469179} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.227233] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 149bd77b-9583-42e5-8c82-f795cac53b87/149bd77b-9583-42e5-8c82-f795cac53b87.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 570.227454] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 570.227731] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-37f2c1ae-fb4a-40dc-b488-797ebd6f6488 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.237435] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 570.237435] env[62585]: value = "task-1384620" [ 570.237435] env[62585]: _type = "Task" [ 570.237435] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.245192] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384620, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.496891] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c4472c-10ff-d89c-224e-a39f8a2b54ec, 'name': SearchDatastore_Task, 'duration_secs': 0.057661} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.497754] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b6d5d86-b73a-433b-9234-145c8a41df7c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.505246] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 570.505246] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52474e73-c93d-ad64-5442-148b744046c5" [ 570.505246] env[62585]: _type = "Task" [ 570.505246] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.516613] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52474e73-c93d-ad64-5442-148b744046c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.641029] env[62585]: DEBUG oslo_concurrency.lockutils [None req-085c9809-428d-4d96-8962-0fe93d2ab8ac tempest-ServerDiagnosticsV248Test-639487555 tempest-ServerDiagnosticsV248Test-639487555-project-member] Lock "779efd7e-99d5-4065-8ade-1665533677a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.795s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.748340] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384620, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062329} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.749022] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 570.751656] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2e5044-8e8a-4a65-a9d1-c95d6f1301ba {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.775880] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 149bd77b-9583-42e5-8c82-f795cac53b87/149bd77b-9583-42e5-8c82-f795cac53b87.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 570.775880] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d03a9309-35f5-4799-b08f-388b1365a47f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.797224] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 570.797224] env[62585]: value = "task-1384621" [ 570.797224] env[62585]: _type = "Task" [ 570.797224] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.808398] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384621, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.018800] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52474e73-c93d-ad64-5442-148b744046c5, 'name': SearchDatastore_Task, 'duration_secs': 0.009144} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.019477] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.019879] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] dd387320-7101-440c-80bc-a7d19a654df8/dd387320-7101-440c-80bc-a7d19a654df8.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 571.020290] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5659513-25f4-414d-be36-4e309e867b3c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.028684] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 571.028684] env[62585]: value = "task-1384622" [ 571.028684] env[62585]: _type = "Task" [ 571.028684] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.036942] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384622, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.078815] env[62585]: DEBUG nova.compute.manager [req-c0a47b26-3ff2-4d45-8b8b-3c6dc7cf2f0d req-3b605c8f-6916-43d5-b0d4-b5ae4a5b1b60 service nova] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Received event network-changed-f88a1b20-5c93-481f-ac6a-b74c531713fc {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 571.080712] env[62585]: DEBUG nova.compute.manager [req-c0a47b26-3ff2-4d45-8b8b-3c6dc7cf2f0d req-3b605c8f-6916-43d5-b0d4-b5ae4a5b1b60 service nova] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Refreshing instance network info cache due to event network-changed-f88a1b20-5c93-481f-ac6a-b74c531713fc. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 571.081020] env[62585]: DEBUG oslo_concurrency.lockutils [req-c0a47b26-3ff2-4d45-8b8b-3c6dc7cf2f0d req-3b605c8f-6916-43d5-b0d4-b5ae4a5b1b60 service nova] Acquiring lock "refresh_cache-01432003-5c48-40e1-b22b-a538a7e34663" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.081243] env[62585]: DEBUG oslo_concurrency.lockutils [req-c0a47b26-3ff2-4d45-8b8b-3c6dc7cf2f0d req-3b605c8f-6916-43d5-b0d4-b5ae4a5b1b60 service nova] Acquired lock "refresh_cache-01432003-5c48-40e1-b22b-a538a7e34663" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.081533] env[62585]: DEBUG nova.network.neutron [req-c0a47b26-3ff2-4d45-8b8b-3c6dc7cf2f0d req-3b605c8f-6916-43d5-b0d4-b5ae4a5b1b60 service nova] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Refreshing network info cache for port f88a1b20-5c93-481f-ac6a-b74c531713fc {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 571.315657] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384621, 'name': ReconfigVM_Task, 'duration_secs': 0.276511} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.318696] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 149bd77b-9583-42e5-8c82-f795cac53b87/149bd77b-9583-42e5-8c82-f795cac53b87.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 571.318696] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e48a0c0-5fd8-4e9b-96a1-e3eacca42bce {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.327256] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 571.327256] env[62585]: value = "task-1384623" [ 571.327256] env[62585]: _type = "Task" [ 571.327256] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.345595] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384623, 'name': Rename_Task} progress is 10%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.442500] env[62585]: ERROR nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f88a1b20-5c93-481f-ac6a-b74c531713fc, please check neutron logs for more information. 
[ 571.442500] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 571.442500] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 571.442500] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 571.442500] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 571.442500] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 571.442500] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 571.442500] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 571.442500] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 571.442500] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 571.442500] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 571.442500] env[62585]: ERROR nova.compute.manager raise self.value [ 571.442500] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 571.442500] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 571.442500] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 571.442500] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 571.443016] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 571.443016] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 571.443016] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f88a1b20-5c93-481f-ac6a-b74c531713fc, please check neutron logs for more information. 
[ 571.443016] env[62585]: ERROR nova.compute.manager [ 571.443016] env[62585]: Traceback (most recent call last): [ 571.443016] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 571.443016] env[62585]: listener.cb(fileno) [ 571.443016] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 571.443016] env[62585]: result = function(*args, **kwargs) [ 571.443016] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 571.443016] env[62585]: return func(*args, **kwargs) [ 571.443016] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 571.443016] env[62585]: raise e [ 571.443016] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 571.443016] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 571.443016] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 571.443016] env[62585]: created_port_ids = self._update_ports_for_instance( [ 571.443016] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 571.443016] env[62585]: with excutils.save_and_reraise_exception(): [ 571.443016] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 571.443016] env[62585]: self.force_reraise() [ 571.443016] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 571.443016] env[62585]: raise self.value [ 571.443016] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 571.443016] env[62585]: updated_port = self._update_port( [ 571.443016] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 571.443016] env[62585]: _ensure_no_port_binding_failure(port) [ 571.443016] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 571.443016] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 571.443943] env[62585]: nova.exception.PortBindingFailed: Binding failed for port f88a1b20-5c93-481f-ac6a-b74c531713fc, please check neutron logs for more information. [ 571.443943] env[62585]: Removing descriptor: 17 [ 571.443943] env[62585]: ERROR nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f88a1b20-5c93-481f-ac6a-b74c531713fc, please check neutron logs for more information. 
[ 571.443943] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Traceback (most recent call last): [ 571.443943] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 571.443943] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] yield resources [ 571.443943] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 571.443943] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] self.driver.spawn(context, instance, image_meta, [ 571.443943] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 571.443943] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] self._vmops.spawn(context, instance, image_meta, injected_files, [ 571.443943] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 571.443943] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] vm_ref = self.build_virtual_machine(instance, [ 571.444327] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 571.444327] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] vif_infos = vmwarevif.get_vif_info(self._session, [ 571.444327] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 571.444327] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] for vif in network_info: [ 571.444327] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 571.444327] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] return self._sync_wrapper(fn, *args, **kwargs) [ 571.444327] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 571.444327] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] self.wait() [ 571.444327] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 571.444327] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] self[:] = self._gt.wait() [ 571.444327] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 571.444327] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] return self._exit_event.wait() [ 571.444327] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 571.444707] env[62585]: ERROR 
nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] result = hub.switch() [ 571.444707] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 571.444707] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] return self.greenlet.switch() [ 571.444707] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 571.444707] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] result = function(*args, **kwargs) [ 571.444707] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 571.444707] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] return func(*args, **kwargs) [ 571.444707] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 571.444707] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] raise e [ 571.444707] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 571.444707] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] nwinfo = self.network_api.allocate_for_instance( [ 571.444707] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 571.444707] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] created_port_ids = self._update_ports_for_instance( [ 571.445115] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 571.445115] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] with excutils.save_and_reraise_exception(): [ 571.445115] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 571.445115] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] self.force_reraise() [ 571.445115] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 571.445115] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] raise self.value [ 571.445115] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 571.445115] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] updated_port = self._update_port( [ 571.445115] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 571.445115] 
env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] _ensure_no_port_binding_failure(port) [ 571.445115] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 571.445115] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] raise exception.PortBindingFailed(port_id=port['id']) [ 571.445529] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] nova.exception.PortBindingFailed: Binding failed for port f88a1b20-5c93-481f-ac6a-b74c531713fc, please check neutron logs for more information. [ 571.445529] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] [ 571.445529] env[62585]: INFO nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Terminating instance [ 571.446653] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Acquiring lock "refresh_cache-01432003-5c48-40e1-b22b-a538a7e34663" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.540694] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384622, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.561233] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed0caa8-fdc2-4829-b5ee-3455f8d1292f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.568210] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234ec698-218e-4b03-a070-4b4c4ecd46ed {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.606221] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27d6579-0730-4c6c-a650-72d5dbe66c7f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.612730] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5c4e32-3ac6-4437-8ede-d549b6266454 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.626864] env[62585]: DEBUG nova.compute.provider_tree [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.629417] env[62585]: DEBUG nova.network.neutron [req-c0a47b26-3ff2-4d45-8b8b-3c6dc7cf2f0d req-3b605c8f-6916-43d5-b0d4-b5ae4a5b1b60 service nova] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] 
Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 571.761186] env[62585]: DEBUG nova.network.neutron [req-c0a47b26-3ff2-4d45-8b8b-3c6dc7cf2f0d req-3b605c8f-6916-43d5-b0d4-b5ae4a5b1b60 service nova] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.845741] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384623, 'name': Rename_Task, 'duration_secs': 0.157166} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.846275] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 571.846696] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6309ffe1-1cf2-4501-b197-64593ae4fc6f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.854865] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 571.854865] env[62585]: value = "task-1384624" [ 571.854865] env[62585]: _type = "Task" [ 571.854865] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.865361] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384624, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.043082] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384622, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523792} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.043504] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] dd387320-7101-440c-80bc-a7d19a654df8/dd387320-7101-440c-80bc-a7d19a654df8.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 572.044035] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 572.044035] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9fb7834f-9742-4631-88a6-8f5eeb50ba0f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.050186] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 572.050186] env[62585]: value = "task-1384625" [ 572.050186] env[62585]: _type = "Task" [ 572.050186] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.062181] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384625, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.131921] env[62585]: DEBUG nova.scheduler.client.report [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 572.266618] env[62585]: DEBUG oslo_concurrency.lockutils [req-c0a47b26-3ff2-4d45-8b8b-3c6dc7cf2f0d req-3b605c8f-6916-43d5-b0d4-b5ae4a5b1b60 service nova] Releasing lock "refresh_cache-01432003-5c48-40e1-b22b-a538a7e34663" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.266979] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Acquired lock "refresh_cache-01432003-5c48-40e1-b22b-a538a7e34663" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.267241] env[62585]: DEBUG nova.network.neutron [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 572.369623] env[62585]: DEBUG oslo_vmware.api [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384624, 'name': PowerOnVM_Task, 'duration_secs': 0.472128} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.369855] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 572.370088] env[62585]: INFO nova.compute.manager [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Took 4.88 seconds to spawn the instance on the hypervisor. 
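The stretch above is a run of oslo.vmware task handling: each vCenter operation (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is submitted, then the driver blocks in wait_for_task/_poll_task, logging "progress is N%" until the task is reported "completed successfully". Below is a minimal sketch of that poll-until-done loop; fetch_task_info() is a hypothetical callable standing in for the vCenter TaskInfo lookup that oslo.vmware actually performs, and the rest is ordinary polling logic rather than the library's implementation.

    import time


    class TaskFailed(Exception):
        """Raised when the remote task ends in an error state."""


    def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300):
        """Poll a long-running task until it finishes.

        fetch_task_info is a hypothetical callable returning a dict such as
        {'state': 'running'|'success'|'error', 'progress': int, 'error': str}.
        The control flow mirrors what the log shows: poll, report progress,
        return on success, raise on error.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            # Corresponds to the "progress is N%" entries seen above.
            print("progress is %d%%" % info.get('progress', 0))
            time.sleep(poll_interval)
        raise TaskFailed('timed out waiting for task')

The alternating "Waiting for the task", "progress is N%", and "completed successfully" entries in this section are the observable trace of exactly this kind of loop, one pass per vCenter task id.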
[ 572.370281] env[62585]: DEBUG nova.compute.manager [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 572.373234] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3628cfb4-a29a-4560-a9bc-89d9978f4b7b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.560886] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384625, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065608} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.562992] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 572.563684] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed2daf9-efa8-4fcf-acad-9daa77666b76 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.593214] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] dd387320-7101-440c-80bc-a7d19a654df8/dd387320-7101-440c-80bc-a7d19a654df8.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 572.593731] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae2af260-6210-469f-a904-d4024b851c2f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.616040] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 572.616040] env[62585]: value = "task-1384626" [ 572.616040] env[62585]: _type = "Task" [ 572.616040] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.623020] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384626, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.638553] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.559s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.639087] env[62585]: DEBUG nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 572.643793] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.318s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.644148] env[62585]: INFO nova.compute.claims [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 572.795990] env[62585]: DEBUG nova.network.neutron [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 572.894948] env[62585]: INFO nova.compute.manager [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Took 34.47 seconds to build instance. 
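The lockutils entries around this point (Lock "compute_resources" acquired by "...ResourceTracker.instance_claim" :: waited 18.318s, then "released" ... held 2.559s) show every claim, usage update, and claim abort being serialized on a single named semaphore per compute host, which is why a busy build queue produces long "waited" values. A minimal sketch of that serialization follows, assuming a toy in-memory tracker: lockutils.synchronized is the real oslo.concurrency decorator that emits this kind of acquired/released DEBUG line, while MiniResourceTracker and its vCPU counter are illustrative stand-ins for the actual ResourceTracker internals.

    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"


    class MiniResourceTracker:
        """Toy stand-in for the resource tracker, counting claimed vCPUs."""

        def __init__(self, total_vcpus):
            self.total_vcpus = total_vcpus
            self.used_vcpus = 0

        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def instance_claim(self, vcpus):
            # Only one caller at a time gets past this point; waiting here
            # is what shows up as "acquired by ... waited N s" in the log.
            if self.used_vcpus + vcpus > self.total_vcpus:
                raise RuntimeError("not enough vCPUs for claim")
            self.used_vcpus += vcpus

        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def abort_instance_claim(self, vcpus):
            # Mirrors the "Aborting claim" path taken after a failed build.
            self.used_vcpus = max(0, self.used_vcpus - vcpus)

Because the semaphore is keyed by name, claim, update_usage, and abort all queue behind one another, matching the interleaved "acquired"/"released" pairs recorded for the different requests in this section.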
[ 572.931300] env[62585]: DEBUG nova.network.neutron [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.001496] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Acquiring lock "0b4d919f-552e-489e-bcfb-f6447cf81fb8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.002020] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Lock "0b4d919f-552e-489e-bcfb-f6447cf81fb8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.128698] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384626, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.151461] env[62585]: DEBUG nova.compute.utils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 573.154224] env[62585]: DEBUG nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 573.154224] env[62585]: DEBUG nova.network.neutron [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 573.249561] env[62585]: DEBUG nova.policy [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6489d5d61d36493891077c5fb2b86dc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91b69b3b9e9b407ea5639ecb7d7e61e3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 573.301035] env[62585]: DEBUG nova.compute.manager [req-e726ede4-7d23-4b63-9170-631f389cbb26 req-8c8a5694-a55b-4430-951f-dde9b4b2ea38 service nova] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Received event network-vif-deleted-f88a1b20-5c93-481f-ac6a-b74c531713fc {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 573.396476] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80fdb8dd-841c-44ab-a89e-189ccc41d5e1 tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Lock "149bd77b-9583-42e5-8c82-f795cac53b87" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.346s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.436381] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Releasing lock "refresh_cache-01432003-5c48-40e1-b22b-a538a7e34663" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.436813] env[62585]: DEBUG nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 573.436993] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 573.437322] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e75db75-3c33-4bc1-80b0-c92b994179d1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.452075] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9261b95-5cda-4663-906b-7aa45f295138 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.490261] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 01432003-5c48-40e1-b22b-a538a7e34663 could not be found. [ 573.490261] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 573.490261] env[62585]: INFO nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Took 0.05 seconds to destroy the instance on the hypervisor. [ 573.490261] env[62585]: DEBUG oslo.service.loopingcall [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 573.490261] env[62585]: DEBUG nova.compute.manager [-] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 573.490459] env[62585]: DEBUG nova.network.neutron [-] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 573.528984] env[62585]: DEBUG nova.network.neutron [-] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 573.610578] env[62585]: DEBUG nova.network.neutron [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Successfully created port: 43ce9572-81e6-47d4-9fc6-74bcf320e382 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 573.626634] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384626, 'name': ReconfigVM_Task, 'duration_secs': 0.721949} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.626634] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Reconfigured VM instance instance-0000000b to attach disk [datastore1] dd387320-7101-440c-80bc-a7d19a654df8/dd387320-7101-440c-80bc-a7d19a654df8.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 573.626634] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b78091ae-cee2-4900-b2bf-d4ddcb6b1124 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.637022] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 573.637022] env[62585]: value = "task-1384627" [ 573.637022] env[62585]: _type = "Task" [ 573.637022] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.644896] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384627, 'name': Rename_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.661859] env[62585]: DEBUG nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 573.901816] env[62585]: DEBUG nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 574.003826] env[62585]: INFO nova.compute.manager [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Rebuilding instance [ 574.034965] env[62585]: DEBUG nova.network.neutron [-] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.067341] env[62585]: DEBUG nova.compute.manager [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 574.068456] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f641c6ee-c6e1-41b5-a7af-93b606749311 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.118034] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869f3d9b-da09-4088-9036-41edd971ceb6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.127122] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2fd234-45c3-45fb-a490-db68c8d249a8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.179685] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48537e81-b43e-4523-9a74-27b5f0a98ca5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.194209] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384627, 'name': Rename_Task, 'duration_secs': 0.470988} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.194578] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 574.199326] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b8fc7c-be15-4857-9ee8-86afcc42deb4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.201278] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f03654fb-f63c-4ebe-bdcc-e7e37e97e438 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.218987] env[62585]: DEBUG nova.compute.provider_tree [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 574.220412] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 574.220412] env[62585]: value = "task-1384628" [ 574.220412] env[62585]: _type = "Task" [ 574.220412] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.231841] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384628, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.436713] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.539355] env[62585]: INFO nova.compute.manager [-] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Took 1.05 seconds to deallocate network for instance. 
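The PortBindingFailed traceback earlier in this section shows the error-handling pattern in nova.network.neutron: each port update runs inside a try block whose except handler uses excutils.save_and_reraise_exception() so the original failure is re-raised after rollback, and _ensure_no_port_binding_failure() raises as soon as Neutron reports a failed binding; the entries that follow (Terminating instance, deallocating the network, "Aborting claim") are the cleanup driven by that exception. A minimal sketch of the check-and-reraise pattern is below; update_port() and cleanup() are hypothetical callables standing in for the Neutron client call and the rollback of already-created ports, while excutils.save_and_reraise_exception and the 'binding_failed' vif_type value are the real pieces visible in the traceback.

    from oslo_utils import excutils

    VIF_TYPE_BINDING_FAILED = 'binding_failed'


    class PortBindingFailed(Exception):
        """Simplified stand-in for the PortBindingFailed exception."""


    def _ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding via the binding:vif_type field.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port['id'])


    def update_ports_for_instance(ports, update_port, cleanup):
        """Update ports one by one; on any failure, roll back and re-raise."""
        updated = []
        try:
            for port in ports:
                updated_port = update_port(port)
                _ensure_no_port_binding_failure(updated_port)
                updated.append(updated_port['id'])
        except Exception:
            with excutils.save_and_reraise_exception():
                # Undo whatever was already set up, then let the original
                # exception (e.g. PortBindingFailed) propagate unchanged,
                # which is why the traceback above still points at the
                # _update_port call site.
                cleanup(updated)
        return updated

The frame order in the logged traceback (the "with excutils.save_and_reraise_exception()" line followed by the re-raised frame inside the loop) is exactly what this try/except structure produces.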
[ 574.541784] env[62585]: DEBUG nova.compute.claims [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 574.541934] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.585576] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 574.590014] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4479715-801b-4b09-8478-643ee0481766 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.593827] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 574.593827] env[62585]: value = "task-1384629" [ 574.593827] env[62585]: _type = "Task" [ 574.593827] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.603324] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384629, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.675735] env[62585]: DEBUG nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 574.700014] env[62585]: DEBUG nova.virt.hardware [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 574.700014] env[62585]: DEBUG nova.virt.hardware [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 574.700014] env[62585]: DEBUG nova.virt.hardware [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 574.700302] env[62585]: DEBUG nova.virt.hardware [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 574.700302] env[62585]: DEBUG nova.virt.hardware [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 574.700377] env[62585]: DEBUG nova.virt.hardware [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 574.700566] env[62585]: DEBUG nova.virt.hardware [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 574.700719] env[62585]: DEBUG nova.virt.hardware [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 574.700877] env[62585]: DEBUG 
nova.virt.hardware [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 574.701051] env[62585]: DEBUG nova.virt.hardware [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 574.701232] env[62585]: DEBUG nova.virt.hardware [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 574.702138] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28989f65-c624-410d-9696-7a9014a16af6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.710135] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f272e665-2963-4a68-a6b8-b263fe6abb11 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.724143] env[62585]: DEBUG nova.scheduler.client.report [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 574.737670] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384628, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.975090] env[62585]: ERROR nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 43ce9572-81e6-47d4-9fc6-74bcf320e382, please check neutron logs for more information. 
[ 574.975090] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 574.975090] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.975090] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 574.975090] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 574.975090] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 574.975090] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 574.975090] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 574.975090] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.975090] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 574.975090] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.975090] env[62585]: ERROR nova.compute.manager raise self.value [ 574.975090] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 574.975090] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 574.975090] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.975090] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 574.975722] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.975722] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 574.975722] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 43ce9572-81e6-47d4-9fc6-74bcf320e382, please check neutron logs for more information. 
[ 574.975722] env[62585]: ERROR nova.compute.manager [ 574.975722] env[62585]: Traceback (most recent call last): [ 574.975722] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 574.975722] env[62585]: listener.cb(fileno) [ 574.975722] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 574.975722] env[62585]: result = function(*args, **kwargs) [ 574.975722] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 574.975722] env[62585]: return func(*args, **kwargs) [ 574.975722] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 574.975722] env[62585]: raise e [ 574.975722] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.975722] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 574.975722] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 574.975722] env[62585]: created_port_ids = self._update_ports_for_instance( [ 574.975722] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 574.975722] env[62585]: with excutils.save_and_reraise_exception(): [ 574.975722] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.975722] env[62585]: self.force_reraise() [ 574.975722] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.975722] env[62585]: raise self.value [ 574.975722] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 574.975722] env[62585]: updated_port = self._update_port( [ 574.975722] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.975722] env[62585]: _ensure_no_port_binding_failure(port) [ 574.975722] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.975722] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 574.976727] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 43ce9572-81e6-47d4-9fc6-74bcf320e382, please check neutron logs for more information. [ 574.976727] env[62585]: Removing descriptor: 17 [ 574.976727] env[62585]: ERROR nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 43ce9572-81e6-47d4-9fc6-74bcf320e382, please check neutron logs for more information. 
[ 574.976727] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Traceback (most recent call last): [ 574.976727] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 574.976727] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] yield resources [ 574.976727] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 574.976727] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] self.driver.spawn(context, instance, image_meta, [ 574.976727] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 574.976727] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 574.976727] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 574.976727] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] vm_ref = self.build_virtual_machine(instance, [ 574.977272] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 574.977272] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] vif_infos = vmwarevif.get_vif_info(self._session, [ 574.977272] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 574.977272] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] for vif in network_info: [ 574.977272] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 574.977272] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] return self._sync_wrapper(fn, *args, **kwargs) [ 574.977272] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 574.977272] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] self.wait() [ 574.977272] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 574.977272] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] self[:] = self._gt.wait() [ 574.977272] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 574.977272] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] return self._exit_event.wait() [ 574.977272] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 574.977817] env[62585]: ERROR 
nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] result = hub.switch() [ 574.977817] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 574.977817] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] return self.greenlet.switch() [ 574.977817] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 574.977817] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] result = function(*args, **kwargs) [ 574.977817] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 574.977817] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] return func(*args, **kwargs) [ 574.977817] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 574.977817] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] raise e [ 574.977817] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.977817] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] nwinfo = self.network_api.allocate_for_instance( [ 574.977817] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 574.977817] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] created_port_ids = self._update_ports_for_instance( [ 574.981952] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 574.981952] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] with excutils.save_and_reraise_exception(): [ 574.981952] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.981952] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] self.force_reraise() [ 574.981952] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.981952] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] raise self.value [ 574.981952] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 574.981952] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] updated_port = self._update_port( [ 574.981952] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.981952] 
env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] _ensure_no_port_binding_failure(port) [ 574.981952] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.981952] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] raise exception.PortBindingFailed(port_id=port['id']) [ 574.982373] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] nova.exception.PortBindingFailed: Binding failed for port 43ce9572-81e6-47d4-9fc6-74bcf320e382, please check neutron logs for more information. [ 574.982373] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] [ 574.982373] env[62585]: INFO nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Terminating instance [ 574.984961] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Acquiring lock "refresh_cache-84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.985562] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Acquired lock "refresh_cache-84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.988434] env[62585]: DEBUG nova.network.neutron [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 575.108874] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384629, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.234169] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.592s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.234681] env[62585]: DEBUG nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 575.244528] env[62585]: DEBUG oslo_concurrency.lockutils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.830s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.250114] env[62585]: INFO nova.compute.claims [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 575.251701] env[62585]: DEBUG oslo_vmware.api [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384628, 'name': PowerOnVM_Task, 'duration_secs': 0.846869} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.252676] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 575.252877] env[62585]: DEBUG nova.compute.manager [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 575.253966] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3366413-1411-4363-a338-1ca96d4ac624 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.448381] env[62585]: DEBUG nova.compute.manager [req-7dc2b4f0-2012-4413-8cd1-7b7f33cc9bdb req-48889cd2-ca55-4512-9165-7442fc11ff76 service nova] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Received event network-changed-43ce9572-81e6-47d4-9fc6-74bcf320e382 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 575.448692] env[62585]: DEBUG nova.compute.manager [req-7dc2b4f0-2012-4413-8cd1-7b7f33cc9bdb req-48889cd2-ca55-4512-9165-7442fc11ff76 service nova] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Refreshing instance network info cache due to event network-changed-43ce9572-81e6-47d4-9fc6-74bcf320e382. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 575.449904] env[62585]: DEBUG oslo_concurrency.lockutils [req-7dc2b4f0-2012-4413-8cd1-7b7f33cc9bdb req-48889cd2-ca55-4512-9165-7442fc11ff76 service nova] Acquiring lock "refresh_cache-84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.515177] env[62585]: DEBUG nova.network.neutron [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 575.608798] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384629, 'name': PowerOffVM_Task, 'duration_secs': 0.612812} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.612264] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 575.612264] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 575.612264] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27a2844-d0f9-4f70-ab87-efc4753ca268 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.620779] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 575.621270] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0227aacd-9485-4fed-b893-92c0ed3f4fbe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.625683] env[62585]: DEBUG nova.network.neutron [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.647154] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 575.647154] env[62585]: DEBUG 
nova.virt.vmwareapi.vmops [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 575.647154] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Deleting the datastore file [datastore1] 149bd77b-9583-42e5-8c82-f795cac53b87 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 575.647154] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9687f151-6348-4e02-9633-1d68ec9dfdae {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.653619] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 575.653619] env[62585]: value = "task-1384631" [ 575.653619] env[62585]: _type = "Task" [ 575.653619] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.666018] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384631, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.754667] env[62585]: DEBUG nova.compute.utils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 575.758582] env[62585]: DEBUG nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 575.758684] env[62585]: DEBUG nova.network.neutron [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 575.780773] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.841777] env[62585]: DEBUG nova.policy [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd330533164a4cbd9e9194bcba248dcf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ad8cfc3cd144940be55ba091ee86f28', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 576.130551] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Releasing lock "refresh_cache-84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.131997] env[62585]: DEBUG nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 576.132257] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 576.132609] env[62585]: DEBUG oslo_concurrency.lockutils [req-7dc2b4f0-2012-4413-8cd1-7b7f33cc9bdb req-48889cd2-ca55-4512-9165-7442fc11ff76 service nova] Acquired lock "refresh_cache-84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.132835] env[62585]: DEBUG nova.network.neutron [req-7dc2b4f0-2012-4413-8cd1-7b7f33cc9bdb req-48889cd2-ca55-4512-9165-7442fc11ff76 service nova] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Refreshing network info cache for port 43ce9572-81e6-47d4-9fc6-74bcf320e382 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 576.139214] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c9a2cf6a-bf13-4fcb-bdf2-2888b99e921b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.156222] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ebc78b-988d-422e-a98c-6a26d8f73bbb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.177855] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384631, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.432976} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.183728] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 576.183812] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 576.183940] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 576.187079] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7 could not be found. 
[ 576.187727] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 576.189025] env[62585]: INFO nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Took 0.06 seconds to destroy the instance on the hypervisor. [ 576.189025] env[62585]: DEBUG oslo.service.loopingcall [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 576.192320] env[62585]: DEBUG nova.compute.manager [-] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 576.192320] env[62585]: DEBUG nova.network.neutron [-] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 576.210023] env[62585]: DEBUG nova.network.neutron [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Successfully created port: aa786168-9bf7-46ad-957b-c6a634d81e7f {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 576.223938] env[62585]: DEBUG nova.network.neutron [-] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 576.260061] env[62585]: DEBUG nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 576.671166] env[62585]: DEBUG nova.network.neutron [req-7dc2b4f0-2012-4413-8cd1-7b7f33cc9bdb req-48889cd2-ca55-4512-9165-7442fc11ff76 service nova] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 576.733266] env[62585]: DEBUG nova.network.neutron [-] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.749941] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f4d649-227d-4fea-aa37-e504a50e6a0c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.758664] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c024bb-dec1-43a7-b04c-cc08c8596870 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.801191] env[62585]: DEBUG nova.network.neutron [req-7dc2b4f0-2012-4413-8cd1-7b7f33cc9bdb req-48889cd2-ca55-4512-9165-7442fc11ff76 service nova] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.802882] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7efcb3b8-5216-4b52-b591-498187b179a8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.810974] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc27d71d-301c-4532-855e-95bd3d468294 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.826296] env[62585]: DEBUG nova.compute.provider_tree [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 577.117261] env[62585]: INFO nova.compute.manager [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Rebuilding instance [ 577.186033] env[62585]: DEBUG nova.compute.manager [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 577.186033] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0aec830-1619-486a-9867-c7418ebf0e91 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.239437] env[62585]: DEBUG nova.virt.hardware [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 577.243022] env[62585]: DEBUG nova.virt.hardware [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 577.243022] env[62585]: DEBUG nova.virt.hardware [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 577.243022] env[62585]: DEBUG nova.virt.hardware [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 577.243022] env[62585]: DEBUG nova.virt.hardware [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 577.243022] env[62585]: DEBUG nova.virt.hardware [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 577.243022] env[62585]: DEBUG nova.virt.hardware [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 577.243573] env[62585]: DEBUG nova.virt.hardware [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 577.243573] env[62585]: DEBUG nova.virt.hardware [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 577.243573] env[62585]: DEBUG nova.virt.hardware [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc 
tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 577.243573] env[62585]: DEBUG nova.virt.hardware [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 577.243573] env[62585]: INFO nova.compute.manager [-] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Took 1.05 seconds to deallocate network for instance. [ 577.243573] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6176afc-0c21-4e37-ad28-d920b83aeb9b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.248103] env[62585]: DEBUG nova.compute.claims [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 577.248707] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.252778] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83d2eb2-4ed6-4980-8983-fd207b8505c3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.270191] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Instance VIF info [] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 577.276958] env[62585]: DEBUG oslo.service.loopingcall [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 577.280251] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 577.280251] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc3727ca-d86b-4e81-9894-c2359cb8ed1a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.296136] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 577.296136] env[62585]: value = "task-1384632" [ 577.296136] env[62585]: _type = "Task" [ 577.296136] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.306105] env[62585]: DEBUG nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 577.309287] env[62585]: DEBUG oslo_concurrency.lockutils [req-7dc2b4f0-2012-4413-8cd1-7b7f33cc9bdb req-48889cd2-ca55-4512-9165-7442fc11ff76 service nova] Releasing lock "refresh_cache-84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.309820] env[62585]: DEBUG nova.compute.manager [req-7dc2b4f0-2012-4413-8cd1-7b7f33cc9bdb req-48889cd2-ca55-4512-9165-7442fc11ff76 service nova] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Received event network-vif-deleted-43ce9572-81e6-47d4-9fc6-74bcf320e382 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 577.315070] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384632, 'name': CreateVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.328711] env[62585]: DEBUG nova.virt.hardware [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 577.328952] env[62585]: DEBUG nova.virt.hardware [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 577.329125] env[62585]: DEBUG nova.virt.hardware [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 577.329298] env[62585]: DEBUG nova.virt.hardware [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 577.329442] env[62585]: DEBUG nova.virt.hardware [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 
tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 577.329584] env[62585]: DEBUG nova.virt.hardware [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 577.329791] env[62585]: DEBUG nova.virt.hardware [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 577.329933] env[62585]: DEBUG nova.virt.hardware [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 577.330122] env[62585]: DEBUG nova.virt.hardware [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 577.330286] env[62585]: DEBUG nova.virt.hardware [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 577.330451] env[62585]: DEBUG nova.virt.hardware [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 577.331328] env[62585]: DEBUG nova.scheduler.client.report [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 577.334764] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14bab42b-ab81-4252-9c3f-128547138d56 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.346822] env[62585]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f03d69-4399-4072-9ead-9eb5365ae435 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.696356] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 577.696698] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7574a76a-5da5-4f8e-b4a1-081bd99abf6d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.704496] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Waiting for the task: (returnval){ [ 577.704496] env[62585]: value = "task-1384633" [ 577.704496] env[62585]: _type = "Task" [ 577.704496] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.718163] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': task-1384633, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.810990] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384632, 'name': CreateVM_Task} progress is 25%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.839208] env[62585]: DEBUG oslo_concurrency.lockutils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.594s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.839863] env[62585]: DEBUG nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Start building networks asynchronously for instance. 
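The 'Getting desirable topologies for flavor ... Flavor limits 0:0:0 ... Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies ... [VirtCPUTopology(cores=1,sockets=1,threads=1)]' block a few entries above (and repeated for every build later in this section) is the CPU topology selection: with no flavor or image preference (the 0:0:0 lines, 0 meaning unset) and ceilings of 65536 sockets/cores/threads, a 1-vCPU m1.nano flavor admits exactly one layout, 1 socket x 1 core x 1 thread. A small enumeration in the same spirit, my own sketch rather than nova.virt.hardware:

```python
import itertools


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) combinations whose product equals vcpus."""
    found = []
    for s, c, t in itertools.product(range(1, min(vcpus, max_sockets) + 1),
                                     range(1, min(vcpus, max_cores) + 1),
                                     range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            found.append((s, c, t))
    return found


print(possible_topologies(1))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"
print(possible_topologies(4))   # e.g. (1, 4, 1), (2, 2, 1), (4, 1, 1), ...
```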
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 577.847019] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.991s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.975257] env[62585]: DEBUG nova.compute.manager [req-ab34502c-6e92-44cd-be80-89edac7ee55b req-4846dd78-da34-4a44-8210-96d73b1515f6 service nova] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Received event network-changed-aa786168-9bf7-46ad-957b-c6a634d81e7f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 577.975257] env[62585]: DEBUG nova.compute.manager [req-ab34502c-6e92-44cd-be80-89edac7ee55b req-4846dd78-da34-4a44-8210-96d73b1515f6 service nova] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Refreshing instance network info cache due to event network-changed-aa786168-9bf7-46ad-957b-c6a634d81e7f. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 577.977763] env[62585]: DEBUG oslo_concurrency.lockutils [req-ab34502c-6e92-44cd-be80-89edac7ee55b req-4846dd78-da34-4a44-8210-96d73b1515f6 service nova] Acquiring lock "refresh_cache-971d6e19-044b-4af8-b6c3-12b617cc24fe" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.977763] env[62585]: DEBUG oslo_concurrency.lockutils [req-ab34502c-6e92-44cd-be80-89edac7ee55b req-4846dd78-da34-4a44-8210-96d73b1515f6 service nova] Acquired lock "refresh_cache-971d6e19-044b-4af8-b6c3-12b617cc24fe" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.977763] env[62585]: DEBUG nova.network.neutron [req-ab34502c-6e92-44cd-be80-89edac7ee55b req-4846dd78-da34-4a44-8210-96d73b1515f6 service nova] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Refreshing network info cache for port aa786168-9bf7-46ad-957b-c6a634d81e7f {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 578.220119] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': task-1384633, 'name': PowerOffVM_Task, 'duration_secs': 0.132672} completed successfully. 
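The 'Lock "compute_resources" "released" by "...instance_claim" :: held 2.594s' and 'acquired by "...abort_instance_claim" :: waited 14.991s' entries just above come from oslo.concurrency's lockutils (lockutils.py:407/421 in the paths shown), which serializes callers on a named lock and reports wait/hold times at DEBUG. A minimal sketch of that pattern; the functions below are illustrative, not Nova's resource tracker:

```python
from oslo_concurrency import lockutils

# Illustrative only -- lockutils serializes callers on the named lock and
# emits the 'acquired ... waited Ns' / '"released" ... held Ns' DEBUG lines.
@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid, vcpus, memory_mb):
    # examine free resources and record the claim while holding the lock
    return {'instance': instance_uuid, 'vcpus': vcpus, 'memory_mb': memory_mb}


def abort_instance_claim(instance_uuid):
    # the context-manager form takes the same named lock inline
    with lockutils.lock('compute_resources'):
        pass  # drop whatever the claim reserved
```

Because every claim and abort funnels through the same name, a long 'waited' value (14.991s for the abort_instance_claim entry above) just means the caller queued behind other holders of that lock.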
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.220119] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 578.220119] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 578.221864] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c7f146-e530-4676-a43b-e17a64f25c0f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.229620] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 578.230077] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0c38cc34-f5b4-4641-9448-60bd54dbccf7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.262429] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 578.262687] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 578.262734] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Deleting the datastore file [datastore1] dd387320-7101-440c-80bc-a7d19a654df8 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 578.263036] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-626f3a78-eb6f-4afd-a175-5e37ebbb5341 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.269749] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Waiting for the task: (returnval){ [ 578.269749] env[62585]: value = "task-1384635" [ 578.269749] env[62585]: _type = "Task" [ 578.269749] env[62585]: } to complete. 
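The 'Waiting for the task: (returnval){ value = "task-1384635" ... } to complete' and 'Task: {...} progress is N%' / 'completed successfully' pairs throughout this section are oslo.vmware's wait_for_task/_poll_task loop (api.py:397 and :434 in the paths above): each vCenter operation returns a task reference that is polled until it reports success or error. A schematic poller in the same spirit, not the oslo.vmware implementation; get_task_info and the poll interval are stand-ins:

```python
import time


class TaskFailed(Exception):
    pass


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter-style task until it finishes.

    get_task_info is a stand-in for whatever fetches the task's current
    state; it is assumed to return an object with .state, .progress and
    .error (oslo.vmware drives its equivalent loop with a looping call).
    """
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            return info                    # the log then records duration_secs
        if info.state == 'error':
            raise TaskFailed(info.error)   # surfaced to the caller as an exception
        # still queued/running: report progress and poll again
        print(f"Task {task_ref} progress is {info.progress}%")
        time.sleep(poll_interval)
```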
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.278933] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': task-1384635, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.306814] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384632, 'name': CreateVM_Task, 'duration_secs': 0.633144} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.307966] env[62585]: ERROR nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port aa786168-9bf7-46ad-957b-c6a634d81e7f, please check neutron logs for more information. [ 578.307966] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 578.307966] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 578.307966] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 578.307966] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 578.307966] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 578.307966] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 578.307966] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 578.307966] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 578.307966] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 578.307966] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 578.307966] env[62585]: ERROR nova.compute.manager raise self.value [ 578.307966] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 578.307966] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 578.307966] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 578.307966] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 578.308529] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 578.308529] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 578.308529] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port aa786168-9bf7-46ad-957b-c6a634d81e7f, please check neutron logs for more information. 
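The traceback above bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure (line 294 in this tree), which raises PortBindingFailed(port_id=port['id']) when the port Neutron returns never got a working binding. A simplified reconstruction from the traceback; the exact attribute inspected (shown here as binding:vif_type == 'binding_failed') is my assumption, the log only shows that the check exists and what it raises:

```python
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")
        self.port_id = port_id


def _ensure_no_port_binding_failure(port):
    # Assumption: an unbindable port comes back from Neutron with
    # binding:vif_type set to 'binding_failed'; the check then raises the
    # exception whose message appears verbatim in the log above.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])
```

Here the exception escapes _allocate_network_async, fails the spawn of 971d6e19-044b-4af8-b6c3-12b617cc24fe, and the instance is terminated a few entries later.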
[ 578.308529] env[62585]: ERROR nova.compute.manager [ 578.308529] env[62585]: Traceback (most recent call last): [ 578.308529] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 578.308529] env[62585]: listener.cb(fileno) [ 578.308529] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 578.308529] env[62585]: result = function(*args, **kwargs) [ 578.308529] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 578.308529] env[62585]: return func(*args, **kwargs) [ 578.308529] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 578.308529] env[62585]: raise e [ 578.308529] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 578.308529] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 578.308529] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 578.308529] env[62585]: created_port_ids = self._update_ports_for_instance( [ 578.308529] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 578.308529] env[62585]: with excutils.save_and_reraise_exception(): [ 578.308529] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 578.308529] env[62585]: self.force_reraise() [ 578.308529] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 578.308529] env[62585]: raise self.value [ 578.308529] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 578.308529] env[62585]: updated_port = self._update_port( [ 578.308529] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 578.308529] env[62585]: _ensure_no_port_binding_failure(port) [ 578.308529] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 578.308529] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 578.309664] env[62585]: nova.exception.PortBindingFailed: Binding failed for port aa786168-9bf7-46ad-957b-c6a634d81e7f, please check neutron logs for more information. [ 578.309664] env[62585]: Removing descriptor: 17 [ 578.309664] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 578.311763] env[62585]: ERROR nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port aa786168-9bf7-46ad-957b-c6a634d81e7f, please check neutron logs for more information. 
[ 578.311763] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Traceback (most recent call last): [ 578.311763] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 578.311763] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] yield resources [ 578.311763] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 578.311763] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] self.driver.spawn(context, instance, image_meta, [ 578.311763] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 578.311763] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 578.311763] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 578.311763] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] vm_ref = self.build_virtual_machine(instance, [ 578.311763] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 578.312207] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] vif_infos = vmwarevif.get_vif_info(self._session, [ 578.312207] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 578.312207] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] for vif in network_info: [ 578.312207] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 578.312207] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] return self._sync_wrapper(fn, *args, **kwargs) [ 578.312207] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 578.312207] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] self.wait() [ 578.312207] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 578.312207] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] self[:] = self._gt.wait() [ 578.312207] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 578.312207] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] return self._exit_event.wait() [ 578.312207] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 578.312207] env[62585]: ERROR 
nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] result = hub.switch() [ 578.312660] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 578.312660] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] return self.greenlet.switch() [ 578.312660] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 578.312660] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] result = function(*args, **kwargs) [ 578.312660] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 578.312660] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] return func(*args, **kwargs) [ 578.312660] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 578.312660] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] raise e [ 578.312660] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 578.312660] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] nwinfo = self.network_api.allocate_for_instance( [ 578.312660] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 578.312660] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] created_port_ids = self._update_ports_for_instance( [ 578.312660] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 578.313109] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] with excutils.save_and_reraise_exception(): [ 578.313109] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 578.313109] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] self.force_reraise() [ 578.313109] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 578.313109] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] raise self.value [ 578.313109] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 578.313109] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] updated_port = self._update_port( [ 578.313109] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 578.313109] 
env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] _ensure_no_port_binding_failure(port) [ 578.313109] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 578.313109] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] raise exception.PortBindingFailed(port_id=port['id']) [ 578.313109] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] nova.exception.PortBindingFailed: Binding failed for port aa786168-9bf7-46ad-957b-c6a634d81e7f, please check neutron logs for more information. [ 578.313109] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] [ 578.313551] env[62585]: INFO nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Terminating instance [ 578.313551] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.313551] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.313551] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 578.313996] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Acquiring lock "refresh_cache-971d6e19-044b-4af8-b6c3-12b617cc24fe" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.313996] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27ad499c-7462-463d-bcbc-8faf1bfdc49d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.319692] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 578.319692] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522bdcf5-bca4-5349-5c3e-c39fe2bc17d7" [ 578.319692] env[62585]: _type = "Task" [ 578.319692] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.330394] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522bdcf5-bca4-5349-5c3e-c39fe2bc17d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.354943] env[62585]: DEBUG nova.compute.utils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 578.361989] env[62585]: DEBUG nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 578.362221] env[62585]: DEBUG nova.network.neutron [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 578.412514] env[62585]: DEBUG nova.policy [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '009de68f3d9d4522b5ff93c78ae35a47', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb0e3bce48434983a36a44fce01f9078', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 578.514701] env[62585]: DEBUG nova.network.neutron [req-ab34502c-6e92-44cd-be80-89edac7ee55b req-4846dd78-da34-4a44-8210-96d73b1515f6 service nova] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Instance cache missing network info. 
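The 'Policy check for network:attach_external_network failed with credentials {...}' entry above is nova.policy delegating to oslo.policy: the caller's credential dict (roles ['reader', 'member'], is_admin False) is evaluated against the registered rule for that action and the check returns False. A hedged sketch with oslo.policy; the check string 'role:admin' and the target are assumptions, only the rule name and credentials come from the log:

```python
from oslo_config import cfg
from oslo_policy import policy

CONF = cfg.CONF
CONF([], project='example')   # parse defaults so the enforcer can read its config

enforcer = policy.Enforcer(CONF)
# Assumed default: in reality the check string is whatever Nova registers.
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'project_id': 'fb0e3bce48434983a36a44fce01f9078',   # from the log entry
         'roles': ['reader', 'member'], 'is_admin': False}

allowed = enforcer.authorize('network:attach_external_network',
                             {'project_id': creds['project_id']},
                             creds, do_raise=False)
print(allowed)   # False -- a member/reader token does not satisfy role:admin
```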
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 578.732948] env[62585]: DEBUG nova.network.neutron [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Successfully created port: 0c82d411-d593-4a34-a6dd-f5f0281b5d53 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 578.751577] env[62585]: DEBUG nova.network.neutron [req-ab34502c-6e92-44cd-be80-89edac7ee55b req-4846dd78-da34-4a44-8210-96d73b1515f6 service nova] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.785093] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': task-1384635, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.254544} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.785388] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 578.785541] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 578.785862] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 578.794316] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70e1897-80e3-4603-82b9-95efbf6a8e10 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.803325] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fda412f-1ad2-42e9-adc1-442ff110a91c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.848661] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98db3506-3b7e-45cd-acba-4a55bb74814d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.858281] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522bdcf5-bca4-5349-5c3e-c39fe2bc17d7, 'name': SearchDatastore_Task, 'duration_secs': 0.019956} completed successfully. 
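The entries from 'Powering off the VM' through 'Unregistered the VM', 'Deleting the datastore file [datastore1] dd387320-...' and 'Instance destroyed' trace the vmwareapi destroy path for that instance: power the VM off, unregister it from vCenter, then delete its directory on the datastore, each step a vCenter call with the usual task polling. A compressed, self-contained sketch of that order; FakeVCenterSession and its call/wait methods are stand-ins, only the vCenter method names come from the log:

```python
class FakeVCenterSession:
    """Stand-in for the driver's vCenter session (illustrative only)."""

    def call(self, obj, method, **kwargs):
        print(f"Invoking {obj}.{method}")
        return f"task-{method}"

    def wait(self, task):
        print(f"Task {task} completed successfully.")


def destroy_instance(session, vm_ref, datastore_dir):
    # Order reconstructed from the log entries above.
    session.wait(session.call(vm_ref, "PowerOffVM_Task"))        # Powering off the VM
    session.call(vm_ref, "UnregisterVM")                         # Unregistering the VM
    session.wait(session.call("FileManager",                     # Deleting the datastore
                              "DeleteDatastoreFile_Task",        # file [datastore1] <uuid>
                              name=datastore_dir))
    print("Instance destroyed")


destroy_instance(FakeVCenterSession(), "vm-ref-dd387320",
                 "[datastore1] dd387320-7101-440c-80bc-a7d19a654df8")
```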
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.860206] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.860304] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 578.860576] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.860655] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.860864] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 578.861098] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d41efd1f-2df6-468f-91b0-c644a8ccd908 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.863877] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839e690f-cd15-40c2-9ccf-00702d0f02a7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.870103] env[62585]: DEBUG nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 578.885701] env[62585]: DEBUG nova.compute.provider_tree [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.888383] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 578.888439] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 578.889363] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b3b2cbb-d048-467e-a102-7ff6664b90ef {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.894717] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 578.894717] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e5b436-be89-4780-c585-c12406415259" [ 578.894717] env[62585]: _type = "Task" [ 578.894717] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.904829] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e5b436-be89-4780-c585-c12406415259, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.257668] env[62585]: DEBUG oslo_concurrency.lockutils [req-ab34502c-6e92-44cd-be80-89edac7ee55b req-4846dd78-da34-4a44-8210-96d73b1515f6 service nova] Releasing lock "refresh_cache-971d6e19-044b-4af8-b6c3-12b617cc24fe" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.258175] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Acquired lock "refresh_cache-971d6e19-044b-4af8-b6c3-12b617cc24fe" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.259244] env[62585]: DEBUG nova.network.neutron [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 579.391426] env[62585]: DEBUG nova.scheduler.client.report [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 579.411020] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e5b436-be89-4780-c585-c12406415259, 'name': SearchDatastore_Task, 'duration_secs': 0.008461} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.411020] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2fe0ea8-3a31-4979-8cce-964e3e3e7cfe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.419404] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 579.419404] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c77f59-8d61-4a09-f547-5aa340cb8063" [ 579.419404] env[62585]: _type = "Task" [ 579.419404] env[62585]: } to complete. 
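The 'Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {...}' entries show what the resource tracker reports to Placement for this node: per resource class, a total plus reserved, min/max unit, step size and allocation ratio. The dict below is copied from the log; the schedulable() helper is my own illustration of how an allocation ratio stretches (or does not stretch) the raw capacity:

```python
# Copied from the log for provider 66db9ec1-b5c3-45d2-a885-8e338110656b.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1,
                  'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1,
                  'max_unit': 176,   'step_size': 1, 'allocation_ratio': 1.0},
}


def schedulable(inv):
    """Capacity Placement can allocate: (total - reserved) * allocation_ratio."""
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']


for rc, inv in inventory.items():
    print(rc, schedulable(inv))   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```

Since nothing changed since the last report, the client only logs the comparison here instead of pushing an update to Placement.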
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.432648] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c77f59-8d61-4a09-f547-5aa340cb8063, 'name': SearchDatastore_Task, 'duration_secs': 0.008334} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.433016] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.433295] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 149bd77b-9583-42e5-8c82-f795cac53b87/149bd77b-9583-42e5-8c82-f795cac53b87.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 579.433546] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e90d68d-1cd3-4af2-ae7d-6600517f8da0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.445750] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 579.445750] env[62585]: value = "task-1384636" [ 579.445750] env[62585]: _type = "Task" [ 579.445750] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.452463] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384636, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.813681] env[62585]: DEBUG nova.network.neutron [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Instance cache missing network info. 
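The 'Processing image 790c072e-...' entries, the SearchDatastore_Task calls and the 'Copying Virtual Disk [datastore2] devstack-image-cache_base/... to [datastore2] 149bd77b-.../...' step above are the per-image datastore cache: under a lock on the cached VMDK path, the driver checks whether the image is already on the datastore and, since it is, copies it into the new instance's folder instead of re-fetching it. A schematic of that decision; file_exists, fetch_image and copy_virtual_disk are hypothetical hooks for SearchDatastore_Task, the Glance download and CopyVirtualDisk_Task, not the vmops API:

```python
from oslo_concurrency import lockutils


def ensure_cached_image_then_copy(image_id, instance_uuid, datastore,
                                  file_exists, fetch_image, copy_virtual_disk):
    """Stand-in for the fetch-if-missing / copy flow shown in the log."""
    cached = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    target = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    # Serialize on the cached path, mirroring the 'Acquiring/Releasing lock
    # "[datastore2] devstack-image-cache_base/..."' entries.
    with lockutils.lock(cached):
        if not file_exists(cached):        # SearchDatastore_Task
            fetch_image(image_id, cached)  # skipped here: the cache already has it

    copy_virtual_disk(cached, target)      # CopyVirtualDisk_Task (task-1384636)


ensure_cached_image_then_copy(
    "790c072e-fdf9-43ec-b7a5-3b21a2eaee40",
    "149bd77b-9583-42e5-8c82-f795cac53b87",
    "datastore2",
    file_exists=lambda path: True,
    fetch_image=lambda image_id, path: None,
    copy_virtual_disk=lambda src, dst: print(f"copy {src} -> {dst}"),
)
```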
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 579.822821] env[62585]: DEBUG nova.virt.hardware [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 579.823068] env[62585]: DEBUG nova.virt.hardware [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 579.823221] env[62585]: DEBUG nova.virt.hardware [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 579.823424] env[62585]: DEBUG nova.virt.hardware [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 579.823577] env[62585]: DEBUG nova.virt.hardware [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 579.823752] env[62585]: DEBUG nova.virt.hardware [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 579.824323] env[62585]: DEBUG nova.virt.hardware [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 579.824589] env[62585]: DEBUG nova.virt.hardware [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 579.827200] env[62585]: DEBUG nova.virt.hardware [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 
tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 579.827200] env[62585]: DEBUG nova.virt.hardware [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 579.827200] env[62585]: DEBUG nova.virt.hardware [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 579.827200] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff87a70-d5fb-4c6d-8f0d-fa91b1465ebc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.841070] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abdc38b2-c3f0-453f-8fa5-170094ed9b31 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.860580] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Instance VIF info [] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 579.866427] env[62585]: DEBUG oslo.service.loopingcall [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 579.866745] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 579.867015] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9a73349-367b-477e-a1a1-e7465efb1b8b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.882573] env[62585]: DEBUG nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 579.893250] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 579.893250] env[62585]: value = "task-1384637" [ 579.893250] env[62585]: _type = "Task" [ 579.893250] env[62585]: } to complete. 
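The 'oslo.service.loopingcall ... Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return' entry above names the other waiting primitive in this section: a FixedIntervalLoopingCall that re-runs a function until it signals completion. A small illustrative wrapper, assuming only the documented oslo.service pattern; make_attempt is a stand-in, not create_vm itself:

```python
from oslo_service import loopingcall


def call_until_done(make_attempt, interval=0.5):
    """Run make_attempt on a fixed interval until it produces a result.

    make_attempt is a stand-in; it returns None to mean "not done yet" and
    a value to finish, which we convert into LoopingCallDone as the
    looping-call API expects.
    """

    def _poll():
        result = make_attempt()
        if result is not None:
            raise loopingcall.LoopingCallDone(retvalue=result)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()
```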
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.904864] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.059s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.905567] env[62585]: ERROR nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bf826877-c169-4efd-a6cc-e1340c279cf4, please check neutron logs for more information. [ 579.905567] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Traceback (most recent call last): [ 579.905567] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 579.905567] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] self.driver.spawn(context, instance, image_meta, [ 579.905567] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 579.905567] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] self._vmops.spawn(context, instance, image_meta, injected_files, [ 579.905567] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 579.905567] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] vm_ref = self.build_virtual_machine(instance, [ 579.905567] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 579.905567] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] vif_infos = vmwarevif.get_vif_info(self._session, [ 579.905567] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 579.905926] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] for vif in network_info: [ 579.905926] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 579.905926] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] return self._sync_wrapper(fn, *args, **kwargs) [ 579.905926] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 579.905926] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] self.wait() [ 579.905926] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/model.py", line 637, in 
wait [ 579.905926] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] self[:] = self._gt.wait() [ 579.905926] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 579.905926] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] return self._exit_event.wait() [ 579.905926] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 579.905926] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] result = hub.switch() [ 579.905926] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 579.905926] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] return self.greenlet.switch() [ 579.906422] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 579.906422] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] result = function(*args, **kwargs) [ 579.906422] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 579.906422] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] return func(*args, **kwargs) [ 579.906422] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 579.906422] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] raise e [ 579.906422] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 579.906422] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] nwinfo = self.network_api.allocate_for_instance( [ 579.906422] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 579.906422] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] created_port_ids = self._update_ports_for_instance( [ 579.906422] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 579.906422] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] with excutils.save_and_reraise_exception(): [ 579.906422] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 579.906800] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] self.force_reraise() [ 579.906800] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 579.906800] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] raise self.value [ 579.906800] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 579.906800] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] updated_port = self._update_port( [ 579.906800] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 579.906800] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] _ensure_no_port_binding_failure(port) [ 579.906800] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 579.906800] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] raise exception.PortBindingFailed(port_id=port['id']) [ 579.906800] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] nova.exception.PortBindingFailed: Binding failed for port bf826877-c169-4efd-a6cc-e1340c279cf4, please check neutron logs for more information. [ 579.906800] env[62585]: ERROR nova.compute.manager [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] [ 579.907136] env[62585]: DEBUG nova.compute.utils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Binding failed for port bf826877-c169-4efd-a6cc-e1340c279cf4, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 579.907611] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384637, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.910299] env[62585]: DEBUG oslo_concurrency.lockutils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.387s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.911395] env[62585]: INFO nova.compute.claims [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 579.914978] env[62585]: DEBUG nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Build of instance 1531ed40-29c2-4812-afd5-eabffe22f4ea was re-scheduled: Binding failed for port bf826877-c169-4efd-a6cc-e1340c279cf4, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 579.914978] env[62585]: DEBUG nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 579.915152] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Acquiring lock "refresh_cache-1531ed40-29c2-4812-afd5-eabffe22f4ea" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.915152] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Acquired lock "refresh_cache-1531ed40-29c2-4812-afd5-eabffe22f4ea" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.918971] env[62585]: DEBUG nova.network.neutron [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 579.923354] env[62585]: DEBUG nova.virt.hardware [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 579.923546] env[62585]: DEBUG nova.virt.hardware [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 579.923692] env[62585]: DEBUG nova.virt.hardware [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 579.923946] env[62585]: DEBUG nova.virt.hardware [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 
tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 579.924175] env[62585]: DEBUG nova.virt.hardware [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 579.924175] env[62585]: DEBUG nova.virt.hardware [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 579.924372] env[62585]: DEBUG nova.virt.hardware [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 579.924521] env[62585]: DEBUG nova.virt.hardware [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 579.924677] env[62585]: DEBUG nova.virt.hardware [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 579.924844] env[62585]: DEBUG nova.virt.hardware [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 579.924991] env[62585]: DEBUG nova.virt.hardware [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 579.926281] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f26a06-2c0b-4600-a77f-cb7c9e6d9903 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.937483] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b4be231-8bf8-4e7b-9109-7a827cc9a43b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.966813] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384636, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494031} completed 
successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.966813] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 149bd77b-9583-42e5-8c82-f795cac53b87/149bd77b-9583-42e5-8c82-f795cac53b87.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 579.966813] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 579.966813] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-56cfa778-46be-4d58-900d-1f73c1e9dfc7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.974401] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 579.974401] env[62585]: value = "task-1384638" [ 579.974401] env[62585]: _type = "Task" [ 579.974401] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.987202] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384638, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.004088] env[62585]: ERROR nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0c82d411-d593-4a34-a6dd-f5f0281b5d53, please check neutron logs for more information. 
[ 580.004088] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 580.004088] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 580.004088] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 580.004088] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 580.004088] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 580.004088] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 580.004088] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 580.004088] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 580.004088] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 580.004088] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 580.004088] env[62585]: ERROR nova.compute.manager raise self.value [ 580.004088] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 580.004088] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 580.004088] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 580.004088] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 580.004731] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 580.004731] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 580.004731] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0c82d411-d593-4a34-a6dd-f5f0281b5d53, please check neutron logs for more information. 
[ 580.004731] env[62585]: ERROR nova.compute.manager [ 580.004731] env[62585]: Traceback (most recent call last): [ 580.004731] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 580.004731] env[62585]: listener.cb(fileno) [ 580.004731] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 580.004731] env[62585]: result = function(*args, **kwargs) [ 580.004731] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 580.004731] env[62585]: return func(*args, **kwargs) [ 580.004731] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 580.004731] env[62585]: raise e [ 580.004731] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 580.004731] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 580.004731] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 580.004731] env[62585]: created_port_ids = self._update_ports_for_instance( [ 580.004731] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 580.004731] env[62585]: with excutils.save_and_reraise_exception(): [ 580.004731] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 580.004731] env[62585]: self.force_reraise() [ 580.004731] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 580.004731] env[62585]: raise self.value [ 580.004731] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 580.004731] env[62585]: updated_port = self._update_port( [ 580.004731] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 580.004731] env[62585]: _ensure_no_port_binding_failure(port) [ 580.004731] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 580.004731] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 580.005668] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 0c82d411-d593-4a34-a6dd-f5f0281b5d53, please check neutron logs for more information. [ 580.005668] env[62585]: Removing descriptor: 17 [ 580.005668] env[62585]: ERROR nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0c82d411-d593-4a34-a6dd-f5f0281b5d53, please check neutron logs for more information. 
[ 580.005668] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Traceback (most recent call last): [ 580.005668] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 580.005668] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] yield resources [ 580.005668] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 580.005668] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] self.driver.spawn(context, instance, image_meta, [ 580.005668] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 580.005668] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] self._vmops.spawn(context, instance, image_meta, injected_files, [ 580.005668] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 580.005668] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] vm_ref = self.build_virtual_machine(instance, [ 580.006072] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 580.006072] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] vif_infos = vmwarevif.get_vif_info(self._session, [ 580.006072] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 580.006072] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] for vif in network_info: [ 580.006072] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 580.006072] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] return self._sync_wrapper(fn, *args, **kwargs) [ 580.006072] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 580.006072] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] self.wait() [ 580.006072] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 580.006072] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] self[:] = self._gt.wait() [ 580.006072] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 580.006072] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] return self._exit_event.wait() [ 580.006072] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 580.006651] env[62585]: ERROR 
nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] result = hub.switch() [ 580.006651] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 580.006651] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] return self.greenlet.switch() [ 580.006651] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 580.006651] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] result = function(*args, **kwargs) [ 580.006651] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 580.006651] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] return func(*args, **kwargs) [ 580.006651] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 580.006651] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] raise e [ 580.006651] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 580.006651] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] nwinfo = self.network_api.allocate_for_instance( [ 580.006651] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 580.006651] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] created_port_ids = self._update_ports_for_instance( [ 580.007493] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 580.007493] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] with excutils.save_and_reraise_exception(): [ 580.007493] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 580.007493] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] self.force_reraise() [ 580.007493] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 580.007493] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] raise self.value [ 580.007493] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 580.007493] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] updated_port = self._update_port( [ 580.007493] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 580.007493] 
env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] _ensure_no_port_binding_failure(port) [ 580.007493] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 580.007493] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] raise exception.PortBindingFailed(port_id=port['id']) [ 580.008314] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] nova.exception.PortBindingFailed: Binding failed for port 0c82d411-d593-4a34-a6dd-f5f0281b5d53, please check neutron logs for more information. [ 580.008314] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] [ 580.008314] env[62585]: INFO nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Terminating instance [ 580.008314] env[62585]: DEBUG oslo_concurrency.lockutils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Acquiring lock "refresh_cache-68b4ca9d-f934-4b44-8c34-0b1bfb848672" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.008314] env[62585]: DEBUG oslo_concurrency.lockutils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Acquired lock "refresh_cache-68b4ca9d-f934-4b44-8c34-0b1bfb848672" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.008314] env[62585]: DEBUG nova.network.neutron [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 580.056857] env[62585]: DEBUG nova.compute.manager [req-5b778959-7881-4ada-bf0a-d4ebc62d0cb3 req-a399561f-44fa-4884-a309-64f85a91582f service nova] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Received event network-vif-deleted-aa786168-9bf7-46ad-957b-c6a634d81e7f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 580.056857] env[62585]: DEBUG nova.compute.manager [req-5b778959-7881-4ada-bf0a-d4ebc62d0cb3 req-a399561f-44fa-4884-a309-64f85a91582f service nova] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Received event network-changed-0c82d411-d593-4a34-a6dd-f5f0281b5d53 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 580.056857] env[62585]: DEBUG nova.compute.manager [req-5b778959-7881-4ada-bf0a-d4ebc62d0cb3 req-a399561f-44fa-4884-a309-64f85a91582f service nova] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Refreshing instance network info cache due to event network-changed-0c82d411-d593-4a34-a6dd-f5f0281b5d53. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 580.057039] env[62585]: DEBUG oslo_concurrency.lockutils [req-5b778959-7881-4ada-bf0a-d4ebc62d0cb3 req-a399561f-44fa-4884-a309-64f85a91582f service nova] Acquiring lock "refresh_cache-68b4ca9d-f934-4b44-8c34-0b1bfb848672" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.077738] env[62585]: DEBUG nova.network.neutron [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.404460] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384637, 'name': CreateVM_Task, 'duration_secs': 0.272362} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.404729] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 580.408618] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.408850] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.409218] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 580.409540] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d7e8b49-acd4-46c8-9fce-73b504644e5a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.414527] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Waiting for the task: (returnval){ [ 580.414527] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527f76a9-bb9d-30a2-0bfa-4b45434e213a" [ 580.414527] env[62585]: _type = "Task" [ 580.414527] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.425457] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527f76a9-bb9d-30a2-0bfa-4b45434e213a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.441518] env[62585]: DEBUG nova.network.neutron [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 580.484313] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384638, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061858} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.485955] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 580.485955] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373bc3e1-469e-4bcb-a138-e215c83ac98c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.510331] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] 149bd77b-9583-42e5-8c82-f795cac53b87/149bd77b-9583-42e5-8c82-f795cac53b87.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 580.514756] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2edfd12-c54f-44d5-b9e5-12e7185c3d62 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.535692] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 580.535692] env[62585]: value = "task-1384639" [ 580.535692] env[62585]: _type = "Task" [ 580.535692] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.544087] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384639, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.544879] env[62585]: DEBUG nova.network.neutron [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 580.580186] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Releasing lock "refresh_cache-971d6e19-044b-4af8-b6c3-12b617cc24fe" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.580617] env[62585]: DEBUG nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 580.580812] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 580.581111] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0a6dd1dd-1303-4579-9be5-a0d138206a81 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.584831] env[62585]: DEBUG nova.network.neutron [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.590566] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040b7b79-4987-40b6-85de-aa0360e939a7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.616682] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 971d6e19-044b-4af8-b6c3-12b617cc24fe could not be found. 
[ 580.616917] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 580.617117] env[62585]: INFO nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Took 0.04 seconds to destroy the instance on the hypervisor. [ 580.617383] env[62585]: DEBUG oslo.service.loopingcall [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 580.617665] env[62585]: DEBUG nova.compute.manager [-] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 580.617665] env[62585]: DEBUG nova.network.neutron [-] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 580.633395] env[62585]: DEBUG nova.network.neutron [-] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 580.843158] env[62585]: DEBUG nova.network.neutron [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.936964] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Acquiring lock "4557a853-232e-49e5-9052-ebf54d68e998" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.937160] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Lock "4557a853-232e-49e5-9052-ebf54d68e998" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.938178] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527f76a9-bb9d-30a2-0bfa-4b45434e213a, 'name': SearchDatastore_Task, 'duration_secs': 0.026342} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.938178] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.938178] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 580.938357] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.938474] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.938661] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 580.939207] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-245e7121-9c24-457b-99cc-e2f77a691a6c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.948501] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 580.948710] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 580.949389] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9b0facc-def2-4923-bfe9-aa5d38319ec8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.960024] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Waiting for the task: (returnval){ [ 580.960024] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]524d5588-60e7-877c-5f94-b9b9fcffad15" [ 580.960024] env[62585]: _type = "Task" [ 580.960024] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.968347] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]524d5588-60e7-877c-5f94-b9b9fcffad15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.050175] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384639, 'name': ReconfigVM_Task, 'duration_secs': 0.2704} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.050175] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Reconfigured VM instance instance-0000000d to attach disk [datastore2] 149bd77b-9583-42e5-8c82-f795cac53b87/149bd77b-9583-42e5-8c82-f795cac53b87.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 581.050567] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-70994569-caa0-49df-86ee-4f6cdf25c013 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.057590] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 581.057590] env[62585]: value = "task-1384640" [ 581.057590] env[62585]: _type = "Task" [ 581.057590] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.076439] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384640, 'name': Rename_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.087928] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Releasing lock "refresh_cache-1531ed40-29c2-4812-afd5-eabffe22f4ea" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.088475] env[62585]: DEBUG nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 581.088685] env[62585]: DEBUG nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 581.089206] env[62585]: DEBUG nova.network.neutron [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 581.131311] env[62585]: DEBUG nova.network.neutron [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 581.135905] env[62585]: DEBUG nova.network.neutron [-] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 581.332752] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Acquiring lock "72cdccb7-b398-4833-af82-d64222c83f8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.332752] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Lock "72cdccb7-b398-4833-af82-d64222c83f8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.345578] env[62585]: DEBUG oslo_concurrency.lockutils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Releasing lock "refresh_cache-68b4ca9d-f934-4b44-8c34-0b1bfb848672" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.345884] env[62585]: DEBUG nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 581.346084] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 581.346505] env[62585]: DEBUG oslo_concurrency.lockutils [req-5b778959-7881-4ada-bf0a-d4ebc62d0cb3 req-a399561f-44fa-4884-a309-64f85a91582f service nova] Acquired lock "refresh_cache-68b4ca9d-f934-4b44-8c34-0b1bfb848672" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.346596] env[62585]: DEBUG nova.network.neutron [req-5b778959-7881-4ada-bf0a-d4ebc62d0cb3 req-a399561f-44fa-4884-a309-64f85a91582f service nova] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Refreshing network info cache for port 0c82d411-d593-4a34-a6dd-f5f0281b5d53 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 581.347665] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-897eda60-db3f-4523-bdae-a5dcb1afb73c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.359650] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c29d7bc-9065-4241-a820-a14ca8652629 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.385135] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 68b4ca9d-f934-4b44-8c34-0b1bfb848672 could not be found. [ 581.385393] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 581.385560] env[62585]: INFO nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Took 0.04 seconds to destroy the instance on the hypervisor. [ 581.385800] env[62585]: DEBUG oslo.service.loopingcall [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 581.389319] env[62585]: DEBUG nova.compute.manager [-] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 581.389435] env[62585]: DEBUG nova.network.neutron [-] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 581.416842] env[62585]: DEBUG nova.network.neutron [-] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 581.434079] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90d24f1-9334-4ebe-ad4b-cef33289cb11 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.440143] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f7df5f-1fd4-44ca-a446-082b863c8dc2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.476856] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2fbccdf-de00-4304-a637-e644b31e9880 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.486462] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]524d5588-60e7-877c-5f94-b9b9fcffad15, 'name': SearchDatastore_Task, 'duration_secs': 0.008423} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.489302] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83beaccb-e588-40ee-b8bc-06e75fba2f45 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.492381] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138c519a-dc12-4d04-9d0c-848442a186e1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.499481] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Waiting for the task: (returnval){ [ 581.499481] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c039aa-2c90-4d3a-0880-35128a04c2a3" [ 581.499481] env[62585]: _type = "Task" [ 581.499481] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.509224] env[62585]: DEBUG nova.compute.provider_tree [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 581.518202] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c039aa-2c90-4d3a-0880-35128a04c2a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.567450] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384640, 'name': Rename_Task, 'duration_secs': 0.146693} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.567717] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 581.567958] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c04d9d9-8297-4630-86b5-fbfa441d2881 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.575062] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 581.575062] env[62585]: value = "task-1384641" [ 581.575062] env[62585]: _type = "Task" [ 581.575062] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.583534] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384641, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.634212] env[62585]: DEBUG nova.network.neutron [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 581.638671] env[62585]: INFO nova.compute.manager [-] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Took 1.02 seconds to deallocate network for instance. 
[ 581.640718] env[62585]: DEBUG nova.compute.claims [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 581.640882] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.881515] env[62585]: DEBUG nova.network.neutron [req-5b778959-7881-4ada-bf0a-d4ebc62d0cb3 req-a399561f-44fa-4884-a309-64f85a91582f service nova] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 581.919478] env[62585]: DEBUG nova.network.neutron [-] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.015072] env[62585]: DEBUG nova.scheduler.client.report [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 582.030601] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c039aa-2c90-4d3a-0880-35128a04c2a3, 'name': SearchDatastore_Task, 'duration_secs': 0.021789} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.030601] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 582.030601] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] dd387320-7101-440c-80bc-a7d19a654df8/dd387320-7101-440c-80bc-a7d19a654df8.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 582.030601] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef27a7d6-7eef-4b8a-b738-e36d6d6f13b7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.038542] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Waiting for the task: (returnval){ [ 582.038542] env[62585]: value = "task-1384642" [ 582.038542] env[62585]: _type = "Task" [ 582.038542] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.050628] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': task-1384642, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.081418] env[62585]: DEBUG nova.network.neutron [req-5b778959-7881-4ada-bf0a-d4ebc62d0cb3 req-a399561f-44fa-4884-a309-64f85a91582f service nova] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.109264] env[62585]: DEBUG oslo_vmware.api [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384641, 'name': PowerOnVM_Task, 'duration_secs': 0.455458} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.109264] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 582.109264] env[62585]: DEBUG nova.compute.manager [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 582.109264] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3a5034-ac98-49db-ab37-dae0700c1774 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.139864] env[62585]: INFO nova.compute.manager [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] [instance: 1531ed40-29c2-4812-afd5-eabffe22f4ea] Took 1.05 seconds to deallocate network for instance. [ 582.426896] env[62585]: INFO nova.compute.manager [-] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Took 1.04 seconds to deallocate network for instance. [ 582.431776] env[62585]: DEBUG nova.compute.claims [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 582.431776] env[62585]: DEBUG oslo_concurrency.lockutils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.441908] env[62585]: DEBUG nova.compute.manager [req-16df97ba-679a-4141-a0a2-32af3ef90193 req-2460b469-f40e-40c4-a958-87d686589c3d service nova] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Received event network-vif-deleted-0c82d411-d593-4a34-a6dd-f5f0281b5d53 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 582.521994] env[62585]: DEBUG oslo_concurrency.lockutils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.612s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.524613] env[62585]: DEBUG nova.compute.manager [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 582.526082] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.943s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.530018] env[62585]: INFO nova.compute.claims [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 582.555429] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': task-1384642, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507649} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.555574] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] dd387320-7101-440c-80bc-a7d19a654df8/dd387320-7101-440c-80bc-a7d19a654df8.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 582.556114] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 582.556114] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e1b00879-4c70-4f36-99b5-a7c9eb489b19 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.562672] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Waiting for the task: (returnval){ [ 582.562672] env[62585]: value = "task-1384643" [ 582.562672] env[62585]: _type = "Task" [ 582.562672] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.571874] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': task-1384643, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.584066] env[62585]: DEBUG oslo_concurrency.lockutils [req-5b778959-7881-4ada-bf0a-d4ebc62d0cb3 req-a399561f-44fa-4884-a309-64f85a91582f service nova] Releasing lock "refresh_cache-68b4ca9d-f934-4b44-8c34-0b1bfb848672" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 582.621501] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.029118] env[62585]: DEBUG nova.compute.utils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 583.030785] env[62585]: DEBUG nova.compute.manager [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 583.031072] env[62585]: DEBUG nova.network.neutron [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 583.043497] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Acquiring lock "53e10c33-0f41-48a2-ac19-c0b34a9a9312" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.043825] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Lock "53e10c33-0f41-48a2-ac19-c0b34a9a9312" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.072405] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': task-1384643, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060001} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.072878] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 583.073716] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a0b4ef-2a1e-4623-90b0-3184d690b37d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.100178] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] dd387320-7101-440c-80bc-a7d19a654df8/dd387320-7101-440c-80bc-a7d19a654df8.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 583.102011] env[62585]: DEBUG nova.policy [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4646b4c0347b4164a058e9542e161187', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52b2f6e506d0420e928bdeb9ce8710d6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 583.103408] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c49acc39-349d-4719-abe6-e2546d7528d3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.125768] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Waiting for the task: (returnval){ [ 583.125768] env[62585]: value = "task-1384644" [ 583.125768] env[62585]: _type = "Task" [ 583.125768] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.135866] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': task-1384644, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.185054] env[62585]: INFO nova.scheduler.client.report [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Deleted allocations for instance 1531ed40-29c2-4812-afd5-eabffe22f4ea [ 583.235760] env[62585]: DEBUG oslo_concurrency.lockutils [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquiring lock "149bd77b-9583-42e5-8c82-f795cac53b87" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.238042] env[62585]: DEBUG oslo_concurrency.lockutils [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Lock "149bd77b-9583-42e5-8c82-f795cac53b87" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.238042] env[62585]: DEBUG oslo_concurrency.lockutils [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquiring lock "149bd77b-9583-42e5-8c82-f795cac53b87-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.238042] env[62585]: DEBUG oslo_concurrency.lockutils [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Lock "149bd77b-9583-42e5-8c82-f795cac53b87-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.238042] env[62585]: DEBUG oslo_concurrency.lockutils [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Lock "149bd77b-9583-42e5-8c82-f795cac53b87-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.239762] env[62585]: INFO nova.compute.manager [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Terminating instance [ 583.242545] env[62585]: DEBUG oslo_concurrency.lockutils [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquiring lock "refresh_cache-149bd77b-9583-42e5-8c82-f795cac53b87" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.242545] env[62585]: DEBUG oslo_concurrency.lockutils [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquired lock 
"refresh_cache-149bd77b-9583-42e5-8c82-f795cac53b87" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.242545] env[62585]: DEBUG nova.network.neutron [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 583.459673] env[62585]: DEBUG nova.network.neutron [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Successfully created port: 343feb77-bd53-4d00-99d7-b144412773b8 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 583.536151] env[62585]: DEBUG nova.compute.manager [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 583.638350] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': task-1384644, 'name': ReconfigVM_Task, 'duration_secs': 0.254738} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.638622] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Reconfigured VM instance instance-0000000b to attach disk [datastore2] dd387320-7101-440c-80bc-a7d19a654df8/dd387320-7101-440c-80bc-a7d19a654df8.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 583.639226] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a32b2dc4-2ea5-4021-b538-32f069734666 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.646137] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Waiting for the task: (returnval){ [ 583.646137] env[62585]: value = "task-1384645" [ 583.646137] env[62585]: _type = "Task" [ 583.646137] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.656195] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': task-1384645, 'name': Rename_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.692860] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2e4732d-cbea-487e-9d72-5bd22f2d7b0f tempest-FloatingIPsAssociationTestJSON-2053329763 tempest-FloatingIPsAssociationTestJSON-2053329763-project-member] Lock "1531ed40-29c2-4812-afd5-eabffe22f4ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.274s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.767671] env[62585]: DEBUG nova.network.neutron [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 583.864670] env[62585]: DEBUG nova.network.neutron [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.007019] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2d95ca-2d23-4f39-b244-88deab51eaca {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.014583] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25fbfa87-4503-4719-8838-e5ca7dc988bf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.057240] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c54667-8e85-4b50-9104-de9e95c677d8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.064277] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70192689-1914-438b-b148-a094828ef244 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.080021] env[62585]: DEBUG nova.compute.provider_tree [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 584.159154] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': task-1384645, 'name': Rename_Task, 'duration_secs': 0.155759} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.159436] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 584.159686] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-009abd38-76db-462a-9f48-3c5acb06868c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.167192] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Waiting for the task: (returnval){ [ 584.167192] env[62585]: value = "task-1384646" [ 584.167192] env[62585]: _type = "Task" [ 584.167192] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.175503] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': task-1384646, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.196196] env[62585]: DEBUG nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 584.374593] env[62585]: DEBUG oslo_concurrency.lockutils [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Releasing lock "refresh_cache-149bd77b-9583-42e5-8c82-f795cac53b87" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.374593] env[62585]: DEBUG nova.compute.manager [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 584.374593] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 584.375931] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d9b1a4-326c-45f4-acb9-7bf594ba1225 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.391689] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 584.391689] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7da628a9-4149-4711-86ac-509f5df623e1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.398355] env[62585]: DEBUG oslo_vmware.api [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 584.398355] env[62585]: value = "task-1384647" [ 584.398355] env[62585]: _type = "Task" [ 584.398355] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.407203] env[62585]: DEBUG oslo_vmware.api [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384647, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.558717] env[62585]: DEBUG nova.compute.manager [req-be932118-c276-4313-b8f1-35969b30c6c5 req-254ee299-64cf-4f3f-b590-0dddc6efb7a7 service nova] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Received event network-changed-343feb77-bd53-4d00-99d7-b144412773b8 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 584.558717] env[62585]: DEBUG nova.compute.manager [req-be932118-c276-4313-b8f1-35969b30c6c5 req-254ee299-64cf-4f3f-b590-0dddc6efb7a7 service nova] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Refreshing instance network info cache due to event network-changed-343feb77-bd53-4d00-99d7-b144412773b8. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 584.558717] env[62585]: DEBUG oslo_concurrency.lockutils [req-be932118-c276-4313-b8f1-35969b30c6c5 req-254ee299-64cf-4f3f-b590-0dddc6efb7a7 service nova] Acquiring lock "refresh_cache-b7686890-0ee7-4c5e-85f5-90a5c5241950" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.560752] env[62585]: DEBUG oslo_concurrency.lockutils [req-be932118-c276-4313-b8f1-35969b30c6c5 req-254ee299-64cf-4f3f-b590-0dddc6efb7a7 service nova] Acquired lock "refresh_cache-b7686890-0ee7-4c5e-85f5-90a5c5241950" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.560752] env[62585]: DEBUG nova.network.neutron [req-be932118-c276-4313-b8f1-35969b30c6c5 req-254ee299-64cf-4f3f-b590-0dddc6efb7a7 service nova] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Refreshing network info cache for port 343feb77-bd53-4d00-99d7-b144412773b8 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 584.563161] env[62585]: DEBUG nova.compute.manager [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 584.583679] env[62585]: DEBUG nova.scheduler.client.report [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 584.611995] env[62585]: DEBUG nova.virt.hardware [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 584.612286] env[62585]: DEBUG nova.virt.hardware [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] 
Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 584.612451] env[62585]: DEBUG nova.virt.hardware [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 584.612634] env[62585]: DEBUG nova.virt.hardware [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 584.613653] env[62585]: DEBUG nova.virt.hardware [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 584.613653] env[62585]: DEBUG nova.virt.hardware [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 584.613653] env[62585]: DEBUG nova.virt.hardware [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 584.613653] env[62585]: DEBUG nova.virt.hardware [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 584.614107] env[62585]: DEBUG nova.virt.hardware [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 584.614107] env[62585]: DEBUG nova.virt.hardware [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 584.614280] env[62585]: DEBUG nova.virt.hardware [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 584.617701] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f55c75-1ae0-4614-bba1-6ba53d2157e3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.626170] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1003f5d6-4c92-4097-a959-96783ca679ce {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.681066] env[62585]: DEBUG oslo_vmware.api [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Task: {'id': task-1384646, 'name': PowerOnVM_Task, 'duration_secs': 0.455108} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.681066] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 584.681066] env[62585]: DEBUG nova.compute.manager [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 584.681066] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a46b53-5e91-4a06-80b9-4c05b116730d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.694982] env[62585]: ERROR nova.compute.manager [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 343feb77-bd53-4d00-99d7-b144412773b8, please check neutron logs for more information. 
[ 584.694982] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 584.694982] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 584.694982] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 584.694982] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 584.694982] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 584.694982] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 584.694982] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 584.694982] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.694982] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 584.694982] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.694982] env[62585]: ERROR nova.compute.manager raise self.value [ 584.694982] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 584.694982] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 584.694982] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.694982] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 584.695478] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.695478] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 584.695478] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 343feb77-bd53-4d00-99d7-b144412773b8, please check neutron logs for more information. 
[ 584.695478] env[62585]: ERROR nova.compute.manager [ 584.695478] env[62585]: Traceback (most recent call last): [ 584.695478] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 584.695478] env[62585]: listener.cb(fileno) [ 584.695478] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 584.695478] env[62585]: result = function(*args, **kwargs) [ 584.695478] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 584.695478] env[62585]: return func(*args, **kwargs) [ 584.695478] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 584.695478] env[62585]: raise e [ 584.695478] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 584.695478] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 584.695478] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 584.695478] env[62585]: created_port_ids = self._update_ports_for_instance( [ 584.695478] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 584.695478] env[62585]: with excutils.save_and_reraise_exception(): [ 584.695478] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.695478] env[62585]: self.force_reraise() [ 584.695478] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.695478] env[62585]: raise self.value [ 584.695478] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 584.695478] env[62585]: updated_port = self._update_port( [ 584.695478] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.695478] env[62585]: _ensure_no_port_binding_failure(port) [ 584.695478] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.695478] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 584.696106] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 343feb77-bd53-4d00-99d7-b144412773b8, please check neutron logs for more information. [ 584.696106] env[62585]: Removing descriptor: 17 [ 584.696686] env[62585]: ERROR nova.compute.manager [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 343feb77-bd53-4d00-99d7-b144412773b8, please check neutron logs for more information. 
[ 584.696686] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Traceback (most recent call last): [ 584.696686] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 584.696686] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] yield resources [ 584.696686] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 584.696686] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] self.driver.spawn(context, instance, image_meta, [ 584.696686] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 584.696686] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] self._vmops.spawn(context, instance, image_meta, injected_files, [ 584.696686] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 584.696686] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] vm_ref = self.build_virtual_machine(instance, [ 584.696686] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 584.696972] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] vif_infos = vmwarevif.get_vif_info(self._session, [ 584.696972] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 584.696972] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] for vif in network_info: [ 584.696972] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 584.696972] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] return self._sync_wrapper(fn, *args, **kwargs) [ 584.696972] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 584.696972] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] self.wait() [ 584.696972] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 584.696972] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] self[:] = self._gt.wait() [ 584.696972] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 584.696972] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] return self._exit_event.wait() [ 584.696972] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 584.696972] env[62585]: ERROR 
nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] result = hub.switch() [ 584.697297] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 584.697297] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] return self.greenlet.switch() [ 584.697297] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 584.697297] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] result = function(*args, **kwargs) [ 584.697297] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 584.697297] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] return func(*args, **kwargs) [ 584.697297] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 584.697297] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] raise e [ 584.697297] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 584.697297] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] nwinfo = self.network_api.allocate_for_instance( [ 584.697297] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 584.697297] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] created_port_ids = self._update_ports_for_instance( [ 584.697297] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 584.697577] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] with excutils.save_and_reraise_exception(): [ 584.697577] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.697577] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] self.force_reraise() [ 584.697577] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.697577] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] raise self.value [ 584.697577] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 584.697577] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] updated_port = self._update_port( [ 584.697577] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.697577] 
env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] _ensure_no_port_binding_failure(port) [ 584.697577] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.697577] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] raise exception.PortBindingFailed(port_id=port['id']) [ 584.697577] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] nova.exception.PortBindingFailed: Binding failed for port 343feb77-bd53-4d00-99d7-b144412773b8, please check neutron logs for more information. [ 584.697577] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] [ 584.697856] env[62585]: INFO nova.compute.manager [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Terminating instance [ 584.703309] env[62585]: DEBUG oslo_concurrency.lockutils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Acquiring lock "refresh_cache-b7686890-0ee7-4c5e-85f5-90a5c5241950" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.730813] env[62585]: DEBUG oslo_concurrency.lockutils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.914338] env[62585]: DEBUG oslo_vmware.api [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384647, 'name': PowerOffVM_Task, 'duration_secs': 0.104231} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.914617] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 584.914868] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 584.915071] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a1d5d620-1f1b-449a-8eec-762e2327f6f2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.939118] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 584.939347] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 584.939541] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Deleting the datastore file [datastore2] 149bd77b-9583-42e5-8c82-f795cac53b87 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 584.939769] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be34cf3d-cd03-4646-ad08-c0b08a08d90c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.945487] env[62585]: DEBUG oslo_vmware.api [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for the task: (returnval){ [ 584.945487] env[62585]: value = "task-1384649" [ 584.945487] env[62585]: _type = "Task" [ 584.945487] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.954402] env[62585]: DEBUG oslo_vmware.api [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384649, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.086442] env[62585]: DEBUG nova.network.neutron [req-be932118-c276-4313-b8f1-35969b30c6c5 req-254ee299-64cf-4f3f-b590-0dddc6efb7a7 service nova] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 585.094433] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.568s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.094932] env[62585]: DEBUG nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 585.100764] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.026s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.102162] env[62585]: INFO nova.compute.claims [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 585.202800] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.213056] env[62585]: DEBUG nova.network.neutron [req-be932118-c276-4313-b8f1-35969b30c6c5 req-254ee299-64cf-4f3f-b590-0dddc6efb7a7 service nova] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.455516] env[62585]: DEBUG oslo_vmware.api [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Task: {'id': task-1384649, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091613} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.455760] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 585.456178] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 585.456412] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 585.456604] env[62585]: INFO nova.compute.manager [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Took 1.08 seconds to destroy the instance on the hypervisor. [ 585.456968] env[62585]: DEBUG oslo.service.loopingcall [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 585.457259] env[62585]: DEBUG nova.compute.manager [-] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 585.457371] env[62585]: DEBUG nova.network.neutron [-] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 585.476206] env[62585]: DEBUG nova.network.neutron [-] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 585.608176] env[62585]: DEBUG nova.compute.utils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 585.615026] env[62585]: DEBUG nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 585.615026] env[62585]: DEBUG nova.network.neutron [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 585.663900] env[62585]: DEBUG nova.policy [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '531790be6166473ba871e20fb44a4acd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8c58c2c6ec14dae9f1fe2e472c6b394', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 585.716989] env[62585]: DEBUG oslo_concurrency.lockutils [req-be932118-c276-4313-b8f1-35969b30c6c5 req-254ee299-64cf-4f3f-b590-0dddc6efb7a7 service nova] Releasing lock "refresh_cache-b7686890-0ee7-4c5e-85f5-90a5c5241950" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.717417] env[62585]: DEBUG oslo_concurrency.lockutils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Acquired lock "refresh_cache-b7686890-0ee7-4c5e-85f5-90a5c5241950" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.717591] env[62585]: DEBUG nova.network.neutron [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 585.813394] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquiring lock "dd387320-7101-440c-80bc-a7d19a654df8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.813700] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Lock "dd387320-7101-440c-80bc-a7d19a654df8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.813912] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquiring lock "dd387320-7101-440c-80bc-a7d19a654df8-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.814107] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Lock "dd387320-7101-440c-80bc-a7d19a654df8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.814277] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Lock "dd387320-7101-440c-80bc-a7d19a654df8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.816315] env[62585]: INFO nova.compute.manager [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Terminating instance [ 585.817842] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquiring lock "refresh_cache-dd387320-7101-440c-80bc-a7d19a654df8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.818020] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquired lock "refresh_cache-dd387320-7101-440c-80bc-a7d19a654df8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.818166] env[62585]: DEBUG nova.network.neutron [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 585.978946] env[62585]: DEBUG nova.network.neutron [-] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.112526] env[62585]: DEBUG nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 586.143792] env[62585]: DEBUG nova.network.neutron [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Successfully created port: 3ca9cf03-5a5a-4d88-beab-3e0e6badf163 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 586.238997] env[62585]: DEBUG nova.network.neutron [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.344676] env[62585]: DEBUG nova.network.neutron [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.368816] env[62585]: DEBUG nova.network.neutron [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.416069] env[62585]: DEBUG nova.network.neutron [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.481980] env[62585]: INFO nova.compute.manager [-] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Took 1.02 seconds to deallocate network for instance. 
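
The PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task entries above all follow oslo.vmware's start-a-vCenter-task-then-poll-it pattern, which is what produces the repeated "Waiting for the task ... progress is 0% ... completed successfully" sequences. The sketch below is an assumed illustration of that library usage, not the Nova driver code itself; the endpoint, the credentials and the 'vm-123' managed-object id are placeholders.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder session; real values would come from the [vmware] config options.
session = vmware_api.VMwareAPISession(
    host='vc.example.test',
    server_username='user',
    server_password='secret',
    api_retry_count=3,
    task_poll_interval=0.5)

# Build a managed-object reference for the VM to power off ('vm-123' is a
# made-up id for illustration).
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

# Kick off the asynchronous vCenter task ...
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

# ... then block until vCenter reports a terminal state; this polling is what
# logs "Task: {'id': task-..., 'name': PowerOffVM_Task} progress is 0%"
# followed by "completed successfully".
task_info = session.wait_for_task(task)
print(task_info.state)
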
[ 586.560339] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e365ecf3-a41e-40c5-bff6-562c6bac79ab {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.567302] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e5d267-b963-4696-a214-584f9dcda489 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.601041] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813b5847-8e68-42cc-8096-5ae501d66a23 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.604879] env[62585]: DEBUG nova.compute.manager [req-902447c5-50dc-43ba-8ebe-1648d6e7bf4a req-bdf2684f-9f48-4209-8e39-1917069540aa service nova] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Received event network-vif-deleted-343feb77-bd53-4d00-99d7-b144412773b8 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 586.610856] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2997a60f-418e-4cc7-bc9a-fc46d180ccd6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.627942] env[62585]: DEBUG nova.compute.provider_tree [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 586.871573] env[62585]: DEBUG oslo_concurrency.lockutils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Releasing lock "refresh_cache-b7686890-0ee7-4c5e-85f5-90a5c5241950" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.871998] env[62585]: DEBUG nova.compute.manager [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 586.872209] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 586.872534] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-090cf00f-87b7-4bc0-bfef-30b9bfe26a01 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.889720] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8510497c-e84a-40cc-a5b4-5ca73fe21fb5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.911751] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b7686890-0ee7-4c5e-85f5-90a5c5241950 could not be found. [ 586.911992] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 586.912830] env[62585]: INFO nova.compute.manager [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Took 0.04 seconds to destroy the instance on the hypervisor. [ 586.912830] env[62585]: DEBUG oslo.service.loopingcall [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 586.912830] env[62585]: DEBUG nova.compute.manager [-] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 586.912830] env[62585]: DEBUG nova.network.neutron [-] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 586.921362] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Releasing lock "refresh_cache-dd387320-7101-440c-80bc-a7d19a654df8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.921741] env[62585]: DEBUG nova.compute.manager [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 586.921914] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 586.922704] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b9fb0a-a6e5-45e4-9985-e556bf313268 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.927061] env[62585]: DEBUG nova.network.neutron [-] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.931786] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 586.931786] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a41550b-2588-4661-9c4a-d172e002decc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.938287] env[62585]: DEBUG oslo_vmware.api [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 586.938287] env[62585]: value = "task-1384650" [ 586.938287] env[62585]: _type = "Task" [ 586.938287] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.948223] env[62585]: DEBUG oslo_vmware.api [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384650, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.993756] env[62585]: DEBUG oslo_concurrency.lockutils [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.128820] env[62585]: DEBUG nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 587.132659] env[62585]: DEBUG nova.scheduler.client.report [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 587.158024] env[62585]: DEBUG nova.virt.hardware [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 587.158024] env[62585]: DEBUG nova.virt.hardware [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 587.158195] env[62585]: DEBUG nova.virt.hardware [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 587.158370] env[62585]: DEBUG nova.virt.hardware [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 587.158706] env[62585]: DEBUG nova.virt.hardware [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 587.158882] env[62585]: DEBUG nova.virt.hardware [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 587.159153] env[62585]: DEBUG nova.virt.hardware [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 587.159256] env[62585]: DEBUG nova.virt.hardware [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 587.159430] env[62585]: DEBUG nova.virt.hardware [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 587.159581] env[62585]: DEBUG nova.virt.hardware [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 587.159751] env[62585]: DEBUG nova.virt.hardware [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 587.160844] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91681c2-682e-42cb-8a25-0f577f8a6f47 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.169345] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65c1684-68d0-4173-910b-1c882579d98c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.432635] env[62585]: DEBUG nova.network.neutron [-] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.453750] env[62585]: DEBUG oslo_vmware.api [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384650, 'name': PowerOffVM_Task, 'duration_secs': 0.139117} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.454320] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 587.454547] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 587.454846] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c4842c6-c4bc-46e8-802d-24b1aaa75d37 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.482505] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 587.482765] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 587.482946] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Deleting the datastore file [datastore2] dd387320-7101-440c-80bc-a7d19a654df8 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 587.483212] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d10e66a3-dd0a-4332-8b9f-fa1d244a4b76 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.489280] env[62585]: DEBUG oslo_vmware.api [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for the task: (returnval){ [ 587.489280] env[62585]: value = "task-1384652" [ 587.489280] env[62585]: _type = "Task" [ 587.489280] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.496664] env[62585]: DEBUG oslo_vmware.api [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384652, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.548817] env[62585]: ERROR nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3ca9cf03-5a5a-4d88-beab-3e0e6badf163, please check neutron logs for more information. [ 587.548817] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 587.548817] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.548817] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 587.548817] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 587.548817] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 587.548817] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 587.548817] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 587.548817] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.548817] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 587.548817] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.548817] env[62585]: ERROR nova.compute.manager raise self.value [ 587.548817] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 587.548817] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 587.548817] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.548817] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 587.549206] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 587.549206] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 587.549206] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3ca9cf03-5a5a-4d88-beab-3e0e6badf163, please check neutron logs for more information. 
[ 587.549206] env[62585]: ERROR nova.compute.manager [ 587.549206] env[62585]: Traceback (most recent call last): [ 587.549206] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 587.549206] env[62585]: listener.cb(fileno) [ 587.549206] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 587.549206] env[62585]: result = function(*args, **kwargs) [ 587.549206] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 587.549206] env[62585]: return func(*args, **kwargs) [ 587.549206] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 587.549206] env[62585]: raise e [ 587.549206] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.549206] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 587.549206] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 587.549206] env[62585]: created_port_ids = self._update_ports_for_instance( [ 587.549206] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 587.549206] env[62585]: with excutils.save_and_reraise_exception(): [ 587.549206] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.549206] env[62585]: self.force_reraise() [ 587.549206] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.549206] env[62585]: raise self.value [ 587.549206] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 587.549206] env[62585]: updated_port = self._update_port( [ 587.549206] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.549206] env[62585]: _ensure_no_port_binding_failure(port) [ 587.549206] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 587.549206] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 587.549995] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 3ca9cf03-5a5a-4d88-beab-3e0e6badf163, please check neutron logs for more information. [ 587.549995] env[62585]: Removing descriptor: 17 [ 587.549995] env[62585]: ERROR nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3ca9cf03-5a5a-4d88-beab-3e0e6badf163, please check neutron logs for more information. 
[ 587.549995] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Traceback (most recent call last): [ 587.549995] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 587.549995] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] yield resources [ 587.549995] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 587.549995] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] self.driver.spawn(context, instance, image_meta, [ 587.549995] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 587.549995] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] self._vmops.spawn(context, instance, image_meta, injected_files, [ 587.549995] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 587.549995] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] vm_ref = self.build_virtual_machine(instance, [ 587.550317] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 587.550317] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] vif_infos = vmwarevif.get_vif_info(self._session, [ 587.550317] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 587.550317] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] for vif in network_info: [ 587.550317] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 587.550317] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] return self._sync_wrapper(fn, *args, **kwargs) [ 587.550317] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 587.550317] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] self.wait() [ 587.550317] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 587.550317] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] self[:] = self._gt.wait() [ 587.550317] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 587.550317] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] return self._exit_event.wait() [ 587.550317] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 587.550584] env[62585]: ERROR 
nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] result = hub.switch() [ 587.550584] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 587.550584] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] return self.greenlet.switch() [ 587.550584] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 587.550584] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] result = function(*args, **kwargs) [ 587.550584] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 587.550584] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] return func(*args, **kwargs) [ 587.550584] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 587.550584] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] raise e [ 587.550584] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.550584] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] nwinfo = self.network_api.allocate_for_instance( [ 587.550584] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 587.550584] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] created_port_ids = self._update_ports_for_instance( [ 587.550894] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 587.550894] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] with excutils.save_and_reraise_exception(): [ 587.550894] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.550894] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] self.force_reraise() [ 587.550894] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.550894] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] raise self.value [ 587.550894] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 587.550894] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] updated_port = self._update_port( [ 587.550894] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.550894] 
env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] _ensure_no_port_binding_failure(port) [ 587.550894] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 587.550894] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] raise exception.PortBindingFailed(port_id=port['id']) [ 587.551194] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] nova.exception.PortBindingFailed: Binding failed for port 3ca9cf03-5a5a-4d88-beab-3e0e6badf163, please check neutron logs for more information. [ 587.551194] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] [ 587.551194] env[62585]: INFO nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Terminating instance [ 587.552149] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Acquiring lock "refresh_cache-5177b4e2-e990-47e6-9f2b-156ca0ee8387" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.552303] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Acquired lock "refresh_cache-5177b4e2-e990-47e6-9f2b-156ca0ee8387" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.552466] env[62585]: DEBUG nova.network.neutron [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 587.641803] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.540s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.641803] env[62585]: DEBUG nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 587.644090] env[62585]: DEBUG oslo_concurrency.lockutils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.380s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.645512] env[62585]: INFO nova.compute.claims [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 587.936596] env[62585]: INFO nova.compute.manager [-] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Took 1.02 seconds to deallocate network for instance. [ 587.939485] env[62585]: DEBUG nova.compute.claims [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 587.939666] env[62585]: DEBUG oslo_concurrency.lockutils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.000151] env[62585]: DEBUG oslo_vmware.api [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Task: {'id': task-1384652, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097626} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.001714] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 588.001714] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 588.001714] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 588.001714] env[62585]: INFO nova.compute.manager [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Took 1.08 seconds to destroy the instance on the hypervisor. 
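
Both PortBindingFailed tracebacks above bottom out in the same check in nova/network/neutron.py: after Neutron returns the updated port, Nova inspects the binding result and raises if the binding failed, which is what aborts the spawn and triggers the "Terminating instance" / claim-abort sequence that follows. A self-contained approximation of that check (illustrative names, not the actual Nova module):

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port: dict) -> None:
    # Neutron marks a port that none of its mechanism drivers could bind
    # with binding:vif_type == 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Example: a port whose binding Neutron reported as failed.
ensure_no_port_binding_failure(
    {'id': '3ca9cf03-5a5a-4d88-beab-3e0e6badf163',
     'binding:vif_type': 'binding_failed'})
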
[ 588.001714] env[62585]: DEBUG oslo.service.loopingcall [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 588.002045] env[62585]: DEBUG nova.compute.manager [-] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 588.002045] env[62585]: DEBUG nova.network.neutron [-] [instance: dd387320-7101-440c-80bc-a7d19a654df8] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 588.023281] env[62585]: DEBUG nova.network.neutron [-] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 588.080226] env[62585]: DEBUG nova.network.neutron [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 588.147690] env[62585]: DEBUG nova.compute.utils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 588.152657] env[62585]: DEBUG nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 588.152840] env[62585]: DEBUG nova.network.neutron [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 588.215441] env[62585]: DEBUG nova.policy [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9f386f9f7fd84febb1c145284f6d6557', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9de0238ac67f4f23875c35a135c67dd1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 588.223604] env[62585]: DEBUG nova.network.neutron [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.527487] env[62585]: DEBUG nova.network.neutron [-] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.646917] env[62585]: DEBUG nova.network.neutron [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Successfully created port: 7bd7a5fd-69b2-4532-93c0-adedf2ca7e4d {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 588.657524] env[62585]: DEBUG nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 588.690874] env[62585]: DEBUG nova.compute.manager [req-b3962923-7b7d-495e-82fd-2c4fb7a7263e req-193e4f42-fe6a-4d08-b026-b49b822f8cd6 service nova] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Received event network-changed-3ca9cf03-5a5a-4d88-beab-3e0e6badf163 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 588.691406] env[62585]: DEBUG nova.compute.manager [req-b3962923-7b7d-495e-82fd-2c4fb7a7263e req-193e4f42-fe6a-4d08-b026-b49b822f8cd6 service nova] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Refreshing instance network info cache due to event network-changed-3ca9cf03-5a5a-4d88-beab-3e0e6badf163. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 588.692050] env[62585]: DEBUG oslo_concurrency.lockutils [req-b3962923-7b7d-495e-82fd-2c4fb7a7263e req-193e4f42-fe6a-4d08-b026-b49b822f8cd6 service nova] Acquiring lock "refresh_cache-5177b4e2-e990-47e6-9f2b-156ca0ee8387" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.727555] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Releasing lock "refresh_cache-5177b4e2-e990-47e6-9f2b-156ca0ee8387" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.727998] env[62585]: DEBUG nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 588.728237] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 588.728766] env[62585]: DEBUG oslo_concurrency.lockutils [req-b3962923-7b7d-495e-82fd-2c4fb7a7263e req-193e4f42-fe6a-4d08-b026-b49b822f8cd6 service nova] Acquired lock "refresh_cache-5177b4e2-e990-47e6-9f2b-156ca0ee8387" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.728931] env[62585]: DEBUG nova.network.neutron [req-b3962923-7b7d-495e-82fd-2c4fb7a7263e req-193e4f42-fe6a-4d08-b026-b49b822f8cd6 service nova] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Refreshing network info cache for port 3ca9cf03-5a5a-4d88-beab-3e0e6badf163 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 588.729884] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9d05672a-f5fc-462c-9ee9-5693ced25622 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.741314] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29f6346-e6ae-42e3-8886-42317d99cc83 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.767477] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5177b4e2-e990-47e6-9f2b-156ca0ee8387 could not be found. 
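The "Acquiring lock" / "Acquired lock" / "Releasing lock" records around "refresh_cache-5177b4e2-e990-47e6-9f2b-156ca0ee8387" above come from oslo.concurrency's named internal locks, which Nova uses to serialize refreshes of an instance's network info cache while external events such as network-changed arrive. A minimal sketch of that locking pattern, assuming a hypothetical refresh_instance_cache/fetch_nw_info pair standing in for the real cache refresh code in nova.network.neutron:

    # Hypothetical stand-ins: refresh_instance_cache and fetch_nw_info are not
    # Nova functions; only the lockutils usage mirrors the records above.
    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid, fetch_nw_info):
        # Serializes concurrent refreshes of one instance's network info
        # cache; entering and leaving the context manager emits the
        # "Acquiring/Acquired/Releasing lock refresh_cache-<uuid>" debug
        # messages seen in the log.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return fetch_nw_info(instance_uuid)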
[ 588.767697] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 588.767879] env[62585]: INFO nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Took 0.04 seconds to destroy the instance on the hypervisor. [ 588.768128] env[62585]: DEBUG oslo.service.loopingcall [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 588.770701] env[62585]: DEBUG nova.compute.manager [-] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 588.770797] env[62585]: DEBUG nova.network.neutron [-] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 588.791589] env[62585]: DEBUG nova.network.neutron [-] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 589.029904] env[62585]: INFO nova.compute.manager [-] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Took 1.03 seconds to deallocate network for instance. 
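The oslo.service.loopingcall records above ("Waiting for function ... _deallocate_network_with_retries to return.") are emitted by the retry wrapper that guards network deallocation during instance teardown. A minimal sketch of that wrapper, assuming illustrative retry parameters and a hypothetical TransientNeutronError in place of the exceptions Nova actually retries on:

    # Hypothetical stand-ins: TransientNeutronError and deallocate_with_retries
    # are illustrative; only the RetryDecorator usage mirrors the log above.
    from oslo_service import loopingcall

    class TransientNeutronError(Exception):
        pass

    def deallocate_with_retries(network_api, context, instance):
        # RetryDecorator re-invokes the wrapped function whenever it raises
        # one of the listed exceptions, backing off between attempts, and
        # logs "Waiting for function ... to return." while it runs.
        @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                    max_sleep_time=5,
                                    exceptions=(TransientNeutronError,))
        def _deallocate():
            network_api.deallocate_for_instance(context, instance)

        return _deallocate()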
[ 589.113016] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c56818-9546-44b2-961e-0d2347543445 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.121287] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a05b512-3044-40c6-b436-4f43e72ed033 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.161822] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ee0fab-3457-4ea3-970c-92a4dd806efb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.174838] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348ba00d-bc56-4c33-9c68-82301ed68196 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.191776] env[62585]: DEBUG nova.compute.provider_tree [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.256622] env[62585]: DEBUG nova.network.neutron [req-b3962923-7b7d-495e-82fd-2c4fb7a7263e req-193e4f42-fe6a-4d08-b026-b49b822f8cd6 service nova] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 589.294558] env[62585]: DEBUG nova.network.neutron [-] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.416155] env[62585]: DEBUG nova.network.neutron [req-b3962923-7b7d-495e-82fd-2c4fb7a7263e req-193e4f42-fe6a-4d08-b026-b49b822f8cd6 service nova] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.546355] env[62585]: DEBUG nova.compute.manager [req-970fdf94-dcd7-42fe-937b-b06bcdc12b15 req-89d63773-945b-4b39-bebe-55c8536a0995 service nova] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Received event network-changed-7bd7a5fd-69b2-4532-93c0-adedf2ca7e4d {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 589.547019] env[62585]: DEBUG nova.compute.manager [req-970fdf94-dcd7-42fe-937b-b06bcdc12b15 req-89d63773-945b-4b39-bebe-55c8536a0995 service nova] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Refreshing instance network info cache due to event network-changed-7bd7a5fd-69b2-4532-93c0-adedf2ca7e4d. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 589.547019] env[62585]: DEBUG oslo_concurrency.lockutils [req-970fdf94-dcd7-42fe-937b-b06bcdc12b15 req-89d63773-945b-4b39-bebe-55c8536a0995 service nova] Acquiring lock "refresh_cache-6f2ca381-b4a3-47ce-b135-dbceb7e44d24" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.547019] env[62585]: DEBUG oslo_concurrency.lockutils [req-970fdf94-dcd7-42fe-937b-b06bcdc12b15 req-89d63773-945b-4b39-bebe-55c8536a0995 service nova] Acquired lock "refresh_cache-6f2ca381-b4a3-47ce-b135-dbceb7e44d24" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.549529] env[62585]: DEBUG nova.network.neutron [req-970fdf94-dcd7-42fe-937b-b06bcdc12b15 req-89d63773-945b-4b39-bebe-55c8536a0995 service nova] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Refreshing network info cache for port 7bd7a5fd-69b2-4532-93c0-adedf2ca7e4d {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 589.551207] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.676382] env[62585]: DEBUG nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 589.700194] env[62585]: DEBUG nova.scheduler.client.report [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 589.707624] env[62585]: DEBUG nova.virt.hardware [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 589.707858] env[62585]: DEBUG nova.virt.hardware [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 589.708015] env[62585]: DEBUG nova.virt.hardware [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 589.708418] env[62585]: DEBUG nova.virt.hardware [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 589.708675] env[62585]: DEBUG nova.virt.hardware [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 589.708805] env[62585]: DEBUG nova.virt.hardware [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 589.709038] env[62585]: DEBUG nova.virt.hardware [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 589.709197] env[62585]: DEBUG nova.virt.hardware [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 589.709359] env[62585]: DEBUG nova.virt.hardware [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 589.709517] env[62585]: DEBUG nova.virt.hardware [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 589.709692] env[62585]: DEBUG nova.virt.hardware [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 589.710560] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf03d6e3-a6da-415b-928c-957bf0807293 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.719525] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce1bd36-9d19-491e-85c3-a1a559f54a7d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.736140] env[62585]: ERROR nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7bd7a5fd-69b2-4532-93c0-adedf2ca7e4d, please check neutron logs for more information. 
[ 589.736140] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 589.736140] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 589.736140] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 589.736140] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 589.736140] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 589.736140] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 589.736140] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 589.736140] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 589.736140] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 589.736140] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 589.736140] env[62585]: ERROR nova.compute.manager raise self.value [ 589.736140] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 589.736140] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 589.736140] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 589.736140] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 589.736565] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 589.736565] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 589.736565] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7bd7a5fd-69b2-4532-93c0-adedf2ca7e4d, please check neutron logs for more information. 
[ 589.736565] env[62585]: ERROR nova.compute.manager [ 589.736682] env[62585]: Traceback (most recent call last): [ 589.736737] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 589.736737] env[62585]: listener.cb(fileno) [ 589.736737] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 589.736737] env[62585]: result = function(*args, **kwargs) [ 589.736737] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 589.736737] env[62585]: return func(*args, **kwargs) [ 589.736737] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 589.736737] env[62585]: raise e [ 589.736737] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 589.736737] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 589.736967] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 589.736967] env[62585]: created_port_ids = self._update_ports_for_instance( [ 589.736967] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 589.736967] env[62585]: with excutils.save_and_reraise_exception(): [ 589.736967] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 589.736967] env[62585]: self.force_reraise() [ 589.736967] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 589.736967] env[62585]: raise self.value [ 589.736967] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 589.736967] env[62585]: updated_port = self._update_port( [ 589.736967] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 589.736967] env[62585]: _ensure_no_port_binding_failure(port) [ 589.736967] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 589.736967] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 589.736967] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 7bd7a5fd-69b2-4532-93c0-adedf2ca7e4d, please check neutron logs for more information. [ 589.736967] env[62585]: Removing descriptor: 15 [ 589.738161] env[62585]: ERROR nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7bd7a5fd-69b2-4532-93c0-adedf2ca7e4d, please check neutron logs for more information. 
[ 589.738161] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Traceback (most recent call last): [ 589.738161] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 589.738161] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] yield resources [ 589.738161] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 589.738161] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] self.driver.spawn(context, instance, image_meta, [ 589.738161] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 589.738161] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] self._vmops.spawn(context, instance, image_meta, injected_files, [ 589.738161] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 589.738161] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] vm_ref = self.build_virtual_machine(instance, [ 589.738161] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 589.738514] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] vif_infos = vmwarevif.get_vif_info(self._session, [ 589.738514] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 589.738514] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] for vif in network_info: [ 589.738514] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 589.738514] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] return self._sync_wrapper(fn, *args, **kwargs) [ 589.738514] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 589.738514] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] self.wait() [ 589.738514] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 589.738514] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] self[:] = self._gt.wait() [ 589.738514] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 589.738514] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] return self._exit_event.wait() [ 589.738514] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 589.738514] env[62585]: ERROR 
nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] result = hub.switch() [ 589.738807] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 589.738807] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] return self.greenlet.switch() [ 589.738807] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 589.738807] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] result = function(*args, **kwargs) [ 589.738807] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 589.738807] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] return func(*args, **kwargs) [ 589.738807] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 589.738807] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] raise e [ 589.738807] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 589.738807] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] nwinfo = self.network_api.allocate_for_instance( [ 589.738807] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 589.738807] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] created_port_ids = self._update_ports_for_instance( [ 589.738807] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 589.739114] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] with excutils.save_and_reraise_exception(): [ 589.739114] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 589.739114] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] self.force_reraise() [ 589.739114] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 589.739114] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] raise self.value [ 589.739114] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 589.739114] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] updated_port = self._update_port( [ 589.739114] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 589.739114] 
env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] _ensure_no_port_binding_failure(port) [ 589.739114] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 589.739114] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] raise exception.PortBindingFailed(port_id=port['id']) [ 589.739114] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] nova.exception.PortBindingFailed: Binding failed for port 7bd7a5fd-69b2-4532-93c0-adedf2ca7e4d, please check neutron logs for more information. [ 589.739114] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] [ 589.739405] env[62585]: INFO nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Terminating instance [ 589.741308] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Acquiring lock "refresh_cache-6f2ca381-b4a3-47ce-b135-dbceb7e44d24" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.797656] env[62585]: INFO nova.compute.manager [-] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Took 1.03 seconds to deallocate network for instance. [ 589.800071] env[62585]: DEBUG nova.compute.claims [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 589.800291] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.918371] env[62585]: DEBUG oslo_concurrency.lockutils [req-b3962923-7b7d-495e-82fd-2c4fb7a7263e req-193e4f42-fe6a-4d08-b026-b49b822f8cd6 service nova] Releasing lock "refresh_cache-5177b4e2-e990-47e6-9f2b-156ca0ee8387" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.918638] env[62585]: DEBUG nova.compute.manager [req-b3962923-7b7d-495e-82fd-2c4fb7a7263e req-193e4f42-fe6a-4d08-b026-b49b822f8cd6 service nova] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Received event network-vif-deleted-3ca9cf03-5a5a-4d88-beab-3e0e6badf163 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 590.085117] env[62585]: DEBUG nova.network.neutron [req-970fdf94-dcd7-42fe-937b-b06bcdc12b15 req-89d63773-945b-4b39-bebe-55c8536a0995 service nova] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 590.205889] env[62585]: DEBUG oslo_concurrency.lockutils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.206435] env[62585]: DEBUG nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 590.210812] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.774s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.212594] env[62585]: INFO nova.compute.claims [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 590.220568] env[62585]: DEBUG nova.network.neutron [req-970fdf94-dcd7-42fe-937b-b06bcdc12b15 req-89d63773-945b-4b39-bebe-55c8536a0995 service nova] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.721242] env[62585]: DEBUG nova.compute.utils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 590.724362] env[62585]: DEBUG nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 590.724362] env[62585]: DEBUG nova.network.neutron [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 590.724362] env[62585]: DEBUG oslo_concurrency.lockutils [req-970fdf94-dcd7-42fe-937b-b06bcdc12b15 req-89d63773-945b-4b39-bebe-55c8536a0995 service nova] Releasing lock "refresh_cache-6f2ca381-b4a3-47ce-b135-dbceb7e44d24" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.724362] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Acquired lock "refresh_cache-6f2ca381-b4a3-47ce-b135-dbceb7e44d24" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.724526] env[62585]: DEBUG nova.network.neutron [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 590.787989] env[62585]: DEBUG nova.policy [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '776a9dccc0e34eeb83199ec4ac715208', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2edd171e27e74e839736595cabc0a2fc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 591.106475] env[62585]: DEBUG nova.network.neutron [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Successfully created port: 03242677-863c-408a-8d91-a5a2042277e5 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 591.226022] env[62585]: DEBUG nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 591.271940] env[62585]: DEBUG nova.network.neutron [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 591.367189] env[62585]: DEBUG nova.network.neutron [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.565555] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "424fc272-b4b9-4867-a083-b27abe308f81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.565796] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "424fc272-b4b9-4867-a083-b27abe308f81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.574811] env[62585]: DEBUG nova.compute.manager [req-4136c39b-4054-4655-8ccc-1d58f11aaea2 req-96a01877-f09a-44c4-9f8e-fbb03b91e8f9 service nova] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Received event network-vif-deleted-7bd7a5fd-69b2-4532-93c0-adedf2ca7e4d {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 591.671772] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16aa62a-08f4-4f9b-a369-8dfc53e84faa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.679418] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998c03da-024b-46cb-9a74-22a1f309a23c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.709725] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517c57c0-cf21-4cbe-9007-c8ab6a38eef5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.716999] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e56332-5a12-4aa1-a022-6dea90231f15 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.730024] env[62585]: DEBUG nova.compute.provider_tree [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 591.874794] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Releasing lock 
"refresh_cache-6f2ca381-b4a3-47ce-b135-dbceb7e44d24" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.875242] env[62585]: DEBUG nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 591.875487] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 591.875830] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9838f1d9-a138-484b-b60f-738059e53b1f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.886256] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c1bc46-d66c-4f1e-a15c-179df04cfb08 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.907306] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6f2ca381-b4a3-47ce-b135-dbceb7e44d24 could not be found. [ 591.907532] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 591.907714] env[62585]: INFO nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Took 0.03 seconds to destroy the instance on the hypervisor. [ 591.907951] env[62585]: DEBUG oslo.service.loopingcall [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 591.908184] env[62585]: DEBUG nova.compute.manager [-] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 591.908277] env[62585]: DEBUG nova.network.neutron [-] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 591.924970] env[62585]: DEBUG nova.network.neutron [-] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 592.168789] env[62585]: ERROR nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 03242677-863c-408a-8d91-a5a2042277e5, please check neutron logs for more information. [ 592.168789] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 592.168789] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 592.168789] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 592.168789] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 592.168789] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 592.168789] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 592.168789] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 592.168789] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.168789] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 592.168789] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.168789] env[62585]: ERROR nova.compute.manager raise self.value [ 592.168789] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 592.168789] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 592.168789] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.168789] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 592.169216] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 592.169216] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 592.169216] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 03242677-863c-408a-8d91-a5a2042277e5, please check neutron logs for more information. 
[ 592.169216] env[62585]: ERROR nova.compute.manager [ 592.169216] env[62585]: Traceback (most recent call last): [ 592.169216] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 592.169216] env[62585]: listener.cb(fileno) [ 592.169216] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 592.169216] env[62585]: result = function(*args, **kwargs) [ 592.169216] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 592.169216] env[62585]: return func(*args, **kwargs) [ 592.169216] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 592.169216] env[62585]: raise e [ 592.169216] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 592.169216] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 592.169216] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 592.169216] env[62585]: created_port_ids = self._update_ports_for_instance( [ 592.169216] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 592.169216] env[62585]: with excutils.save_and_reraise_exception(): [ 592.169216] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.169216] env[62585]: self.force_reraise() [ 592.169216] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.169216] env[62585]: raise self.value [ 592.169216] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 592.169216] env[62585]: updated_port = self._update_port( [ 592.169216] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.169216] env[62585]: _ensure_no_port_binding_failure(port) [ 592.169216] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 592.169216] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 592.169899] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 03242677-863c-408a-8d91-a5a2042277e5, please check neutron logs for more information. [ 592.169899] env[62585]: Removing descriptor: 15 [ 592.236866] env[62585]: DEBUG nova.scheduler.client.report [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 592.240767] env[62585]: DEBUG nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 592.272350] env[62585]: DEBUG nova.virt.hardware [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 592.272603] env[62585]: DEBUG nova.virt.hardware [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 592.272759] env[62585]: DEBUG nova.virt.hardware [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 592.272935] env[62585]: DEBUG nova.virt.hardware [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 592.273122] env[62585]: DEBUG nova.virt.hardware [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 592.273302] env[62585]: DEBUG nova.virt.hardware [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 592.273536] env[62585]: DEBUG nova.virt.hardware [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 592.273695] env[62585]: DEBUG nova.virt.hardware [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 592.273856] env[62585]: DEBUG nova.virt.hardware [None 
req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 592.274022] env[62585]: DEBUG nova.virt.hardware [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 592.274225] env[62585]: DEBUG nova.virt.hardware [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 592.275063] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f93cad7-19fd-4cf1-a46b-37b45cd1d130 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.284428] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097b9909-180a-41bc-b629-af93c3b60f57 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.298594] env[62585]: ERROR nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 03242677-863c-408a-8d91-a5a2042277e5, please check neutron logs for more information. 
[ 592.298594] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] Traceback (most recent call last): [ 592.298594] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 592.298594] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] yield resources [ 592.298594] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 592.298594] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] self.driver.spawn(context, instance, image_meta, [ 592.298594] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 592.298594] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] self._vmops.spawn(context, instance, image_meta, injected_files, [ 592.298594] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 592.298594] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] vm_ref = self.build_virtual_machine(instance, [ 592.298594] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 592.298939] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] vif_infos = vmwarevif.get_vif_info(self._session, [ 592.298939] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 592.298939] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] for vif in network_info: [ 592.298939] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 592.298939] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] return self._sync_wrapper(fn, *args, **kwargs) [ 592.298939] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 592.298939] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] self.wait() [ 592.298939] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 592.298939] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] self[:] = self._gt.wait() [ 592.298939] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 592.298939] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] return self._exit_event.wait() [ 592.298939] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 592.298939] env[62585]: ERROR 
nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] current.throw(*self._exc) [ 592.299273] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 592.299273] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] result = function(*args, **kwargs) [ 592.299273] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 592.299273] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] return func(*args, **kwargs) [ 592.299273] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 592.299273] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] raise e [ 592.299273] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 592.299273] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] nwinfo = self.network_api.allocate_for_instance( [ 592.299273] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 592.299273] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] created_port_ids = self._update_ports_for_instance( [ 592.299273] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 592.299273] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] with excutils.save_and_reraise_exception(): [ 592.299273] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.299605] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] self.force_reraise() [ 592.299605] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.299605] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] raise self.value [ 592.299605] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 592.299605] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] updated_port = self._update_port( [ 592.299605] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.299605] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] _ensure_no_port_binding_failure(port) [ 592.299605] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
592.299605] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] raise exception.PortBindingFailed(port_id=port['id']) [ 592.299605] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] nova.exception.PortBindingFailed: Binding failed for port 03242677-863c-408a-8d91-a5a2042277e5, please check neutron logs for more information. [ 592.299605] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] [ 592.299605] env[62585]: INFO nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Terminating instance [ 592.300893] env[62585]: DEBUG oslo_concurrency.lockutils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Acquiring lock "refresh_cache-92080abc-eb47-439b-b702-d226666fa155" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.301541] env[62585]: DEBUG oslo_concurrency.lockutils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Acquired lock "refresh_cache-92080abc-eb47-439b-b702-d226666fa155" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.301541] env[62585]: DEBUG nova.network.neutron [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 592.427724] env[62585]: DEBUG nova.network.neutron [-] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.744256] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.533s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.744793] env[62585]: DEBUG nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 592.747456] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.205s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.817740] env[62585]: DEBUG nova.network.neutron [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 592.906239] env[62585]: DEBUG nova.network.neutron [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.930494] env[62585]: INFO nova.compute.manager [-] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Took 1.02 seconds to deallocate network for instance. [ 592.933115] env[62585]: DEBUG nova.compute.claims [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 592.933314] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.255860] env[62585]: DEBUG nova.compute.utils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 593.258058] env[62585]: DEBUG nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 593.258058] env[62585]: DEBUG nova.network.neutron [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 593.303267] env[62585]: DEBUG nova.policy [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9a2336e0b124f03ad700405bcad8f32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19b8936eaf754cbcbd1b099846a3146d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 593.409199] env[62585]: DEBUG oslo_concurrency.lockutils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Releasing lock "refresh_cache-92080abc-eb47-439b-b702-d226666fa155" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.409864] env[62585]: DEBUG nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 593.410094] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 593.410398] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-430c80c2-f7b7-45a3-95fd-70b45a171b6a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.423766] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b25988-3eb3-4bb1-bc31-25df81effd4d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.447979] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 92080abc-eb47-439b-b702-d226666fa155 could not be found. 
[ 593.448218] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 593.448398] env[62585]: INFO nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Took 0.04 seconds to destroy the instance on the hypervisor. [ 593.448638] env[62585]: DEBUG oslo.service.loopingcall [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 593.451322] env[62585]: DEBUG nova.compute.manager [-] [instance: 92080abc-eb47-439b-b702-d226666fa155] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 593.451401] env[62585]: DEBUG nova.network.neutron [-] [instance: 92080abc-eb47-439b-b702-d226666fa155] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 593.472307] env[62585]: DEBUG nova.network.neutron [-] [instance: 92080abc-eb47-439b-b702-d226666fa155] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 593.600325] env[62585]: DEBUG nova.compute.manager [req-c15f8414-5eeb-4661-9847-4b352b5d984b req-95c93c24-551c-4826-97d1-77b9a9d4684e service nova] [instance: 92080abc-eb47-439b-b702-d226666fa155] Received event network-changed-03242677-863c-408a-8d91-a5a2042277e5 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 593.601403] env[62585]: DEBUG nova.compute.manager [req-c15f8414-5eeb-4661-9847-4b352b5d984b req-95c93c24-551c-4826-97d1-77b9a9d4684e service nova] [instance: 92080abc-eb47-439b-b702-d226666fa155] Refreshing instance network info cache due to event network-changed-03242677-863c-408a-8d91-a5a2042277e5. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 593.601403] env[62585]: DEBUG oslo_concurrency.lockutils [req-c15f8414-5eeb-4661-9847-4b352b5d984b req-95c93c24-551c-4826-97d1-77b9a9d4684e service nova] Acquiring lock "refresh_cache-92080abc-eb47-439b-b702-d226666fa155" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.601403] env[62585]: DEBUG oslo_concurrency.lockutils [req-c15f8414-5eeb-4661-9847-4b352b5d984b req-95c93c24-551c-4826-97d1-77b9a9d4684e service nova] Acquired lock "refresh_cache-92080abc-eb47-439b-b702-d226666fa155" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.601403] env[62585]: DEBUG nova.network.neutron [req-c15f8414-5eeb-4661-9847-4b352b5d984b req-95c93c24-551c-4826-97d1-77b9a9d4684e service nova] [instance: 92080abc-eb47-439b-b702-d226666fa155] Refreshing network info cache for port 03242677-863c-408a-8d91-a5a2042277e5 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 593.624066] env[62585]: DEBUG nova.network.neutron [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Successfully created port: d1a6a81d-1664-401f-ae68-915994245700 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 593.637050] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60713600-a88e-41a3-b028-97c292343a1b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.645114] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98624bbf-884f-486a-85e1-3e25c378a327 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.676211] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a6e2b4-0fe4-43c3-92cf-f8c1780ac8bc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.684639] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52fbeedd-621a-49f6-b73a-fe1619518d23 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.700131] env[62585]: DEBUG nova.compute.provider_tree [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 593.761932] env[62585]: DEBUG nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 593.976616] env[62585]: DEBUG nova.network.neutron [-] [instance: 92080abc-eb47-439b-b702-d226666fa155] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.120868] env[62585]: DEBUG nova.network.neutron [req-c15f8414-5eeb-4661-9847-4b352b5d984b req-95c93c24-551c-4826-97d1-77b9a9d4684e service nova] [instance: 92080abc-eb47-439b-b702-d226666fa155] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 594.206512] env[62585]: DEBUG nova.scheduler.client.report [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 594.245261] env[62585]: DEBUG nova.network.neutron [req-c15f8414-5eeb-4661-9847-4b352b5d984b req-95c93c24-551c-4826-97d1-77b9a9d4684e service nova] [instance: 92080abc-eb47-439b-b702-d226666fa155] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.479029] env[62585]: INFO nova.compute.manager [-] [instance: 92080abc-eb47-439b-b702-d226666fa155] Took 1.03 seconds to deallocate network for instance. [ 594.481657] env[62585]: DEBUG nova.compute.claims [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 594.482067] env[62585]: DEBUG oslo_concurrency.lockutils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.579666] env[62585]: ERROR nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d1a6a81d-1664-401f-ae68-915994245700, please check neutron logs for more information. 
[ 594.579666] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 594.579666] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 594.579666] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 594.579666] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 594.579666] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 594.579666] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 594.579666] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 594.579666] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 594.579666] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 594.579666] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 594.579666] env[62585]: ERROR nova.compute.manager raise self.value [ 594.579666] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 594.579666] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 594.579666] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 594.579666] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 594.580042] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 594.580042] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 594.580042] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d1a6a81d-1664-401f-ae68-915994245700, please check neutron logs for more information. 
[ 594.580042] env[62585]: ERROR nova.compute.manager [ 594.580042] env[62585]: Traceback (most recent call last): [ 594.580042] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 594.580042] env[62585]: listener.cb(fileno) [ 594.580042] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 594.580042] env[62585]: result = function(*args, **kwargs) [ 594.580042] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 594.580042] env[62585]: return func(*args, **kwargs) [ 594.580042] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 594.580042] env[62585]: raise e [ 594.580042] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 594.580042] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 594.580042] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 594.580042] env[62585]: created_port_ids = self._update_ports_for_instance( [ 594.580042] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 594.580042] env[62585]: with excutils.save_and_reraise_exception(): [ 594.580042] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 594.580042] env[62585]: self.force_reraise() [ 594.580042] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 594.580042] env[62585]: raise self.value [ 594.580042] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 594.580042] env[62585]: updated_port = self._update_port( [ 594.580042] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 594.580042] env[62585]: _ensure_no_port_binding_failure(port) [ 594.580042] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 594.580042] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 594.580702] env[62585]: nova.exception.PortBindingFailed: Binding failed for port d1a6a81d-1664-401f-ae68-915994245700, please check neutron logs for more information. [ 594.580702] env[62585]: Removing descriptor: 15 [ 594.709735] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.962s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.710388] env[62585]: ERROR nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f88a1b20-5c93-481f-ac6a-b74c531713fc, please check neutron logs for more information. 
[ 594.710388] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Traceback (most recent call last): [ 594.710388] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 594.710388] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] self.driver.spawn(context, instance, image_meta, [ 594.710388] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 594.710388] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] self._vmops.spawn(context, instance, image_meta, injected_files, [ 594.710388] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 594.710388] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] vm_ref = self.build_virtual_machine(instance, [ 594.710388] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 594.710388] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] vif_infos = vmwarevif.get_vif_info(self._session, [ 594.710388] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 594.710800] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] for vif in network_info: [ 594.710800] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 594.710800] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] return self._sync_wrapper(fn, *args, **kwargs) [ 594.710800] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 594.710800] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] self.wait() [ 594.710800] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 594.710800] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] self[:] = self._gt.wait() [ 594.710800] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 594.710800] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] return self._exit_event.wait() [ 594.710800] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 594.710800] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] result = hub.switch() [ 594.710800] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
594.710800] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] return self.greenlet.switch() [ 594.711169] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 594.711169] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] result = function(*args, **kwargs) [ 594.711169] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 594.711169] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] return func(*args, **kwargs) [ 594.711169] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 594.711169] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] raise e [ 594.711169] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 594.711169] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] nwinfo = self.network_api.allocate_for_instance( [ 594.711169] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 594.711169] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] created_port_ids = self._update_ports_for_instance( [ 594.711169] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 594.711169] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] with excutils.save_and_reraise_exception(): [ 594.711169] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 594.711729] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] self.force_reraise() [ 594.711729] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 594.711729] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] raise self.value [ 594.711729] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 594.711729] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] updated_port = self._update_port( [ 594.711729] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 594.711729] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] _ensure_no_port_binding_failure(port) [ 594.711729] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 594.711729] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] raise exception.PortBindingFailed(port_id=port['id']) [ 594.711729] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] nova.exception.PortBindingFailed: Binding failed for port f88a1b20-5c93-481f-ac6a-b74c531713fc, please check neutron logs for more information. [ 594.711729] env[62585]: ERROR nova.compute.manager [instance: 01432003-5c48-40e1-b22b-a538a7e34663] [ 594.712059] env[62585]: DEBUG nova.compute.utils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Binding failed for port f88a1b20-5c93-481f-ac6a-b74c531713fc, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 594.712256] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.932s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.712434] env[62585]: DEBUG nova.objects.instance [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62585) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 594.716869] env[62585]: DEBUG nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Build of instance 01432003-5c48-40e1-b22b-a538a7e34663 was re-scheduled: Binding failed for port f88a1b20-5c93-481f-ac6a-b74c531713fc, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 594.717293] env[62585]: DEBUG nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 594.717514] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Acquiring lock "refresh_cache-01432003-5c48-40e1-b22b-a538a7e34663" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.717671] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Acquired lock "refresh_cache-01432003-5c48-40e1-b22b-a538a7e34663" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.717832] env[62585]: DEBUG nova.network.neutron [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 594.748337] env[62585]: DEBUG oslo_concurrency.lockutils [req-c15f8414-5eeb-4661-9847-4b352b5d984b req-95c93c24-551c-4826-97d1-77b9a9d4684e service nova] Releasing lock "refresh_cache-92080abc-eb47-439b-b702-d226666fa155" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.748570] env[62585]: DEBUG nova.compute.manager [req-c15f8414-5eeb-4661-9847-4b352b5d984b req-95c93c24-551c-4826-97d1-77b9a9d4684e service nova] [instance: 92080abc-eb47-439b-b702-d226666fa155] Received event network-vif-deleted-03242677-863c-408a-8d91-a5a2042277e5 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 594.772291] env[62585]: DEBUG nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 594.798189] env[62585]: DEBUG nova.virt.hardware [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 594.798440] env[62585]: DEBUG nova.virt.hardware [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 594.798594] env[62585]: DEBUG nova.virt.hardware [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 594.798773] env[62585]: DEBUG nova.virt.hardware [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 594.798913] env[62585]: DEBUG nova.virt.hardware [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 594.799067] env[62585]: DEBUG nova.virt.hardware [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 594.799277] env[62585]: DEBUG nova.virt.hardware [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 594.799426] env[62585]: DEBUG nova.virt.hardware [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 594.799619] env[62585]: DEBUG 
nova.virt.hardware [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 594.799782] env[62585]: DEBUG nova.virt.hardware [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 594.799949] env[62585]: DEBUG nova.virt.hardware [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 594.800808] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430010c6-f1fd-4005-b9ed-4f1d7db82296 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.810095] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a302c38-eb32-4bbe-81da-27e1d24b5920 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.823615] env[62585]: ERROR nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d1a6a81d-1664-401f-ae68-915994245700, please check neutron logs for more information. 
[ 594.823615] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] Traceback (most recent call last): [ 594.823615] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 594.823615] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] yield resources [ 594.823615] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 594.823615] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] self.driver.spawn(context, instance, image_meta, [ 594.823615] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 594.823615] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] self._vmops.spawn(context, instance, image_meta, injected_files, [ 594.823615] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 594.823615] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] vm_ref = self.build_virtual_machine(instance, [ 594.823615] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 594.823874] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] vif_infos = vmwarevif.get_vif_info(self._session, [ 594.823874] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 594.823874] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] for vif in network_info: [ 594.823874] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 594.823874] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] return self._sync_wrapper(fn, *args, **kwargs) [ 594.823874] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 594.823874] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] self.wait() [ 594.823874] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 594.823874] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] self[:] = self._gt.wait() [ 594.823874] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 594.823874] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] return self._exit_event.wait() [ 594.823874] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 594.823874] env[62585]: ERROR 
nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] current.throw(*self._exc) [ 594.824160] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 594.824160] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] result = function(*args, **kwargs) [ 594.824160] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 594.824160] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] return func(*args, **kwargs) [ 594.824160] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 594.824160] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] raise e [ 594.824160] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 594.824160] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] nwinfo = self.network_api.allocate_for_instance( [ 594.824160] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 594.824160] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] created_port_ids = self._update_ports_for_instance( [ 594.824160] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 594.824160] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] with excutils.save_and_reraise_exception(): [ 594.824160] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 594.824438] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] self.force_reraise() [ 594.824438] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 594.824438] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] raise self.value [ 594.824438] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 594.824438] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] updated_port = self._update_port( [ 594.824438] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 594.824438] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] _ensure_no_port_binding_failure(port) [ 594.824438] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
594.824438] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] raise exception.PortBindingFailed(port_id=port['id']) [ 594.824438] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] nova.exception.PortBindingFailed: Binding failed for port d1a6a81d-1664-401f-ae68-915994245700, please check neutron logs for more information. [ 594.824438] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] [ 594.824438] env[62585]: INFO nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Terminating instance [ 594.825596] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "refresh_cache-a8af7330-6454-439c-870b-73d1637b6438" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.825764] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock "refresh_cache-a8af7330-6454-439c-870b-73d1637b6438" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.826658] env[62585]: DEBUG nova.network.neutron [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 595.233962] env[62585]: DEBUG nova.network.neutron [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 595.324764] env[62585]: DEBUG nova.network.neutron [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.350583] env[62585]: DEBUG nova.network.neutron [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 595.430699] env[62585]: DEBUG nova.network.neutron [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.623560] env[62585]: DEBUG nova.compute.manager [req-6fe93cbc-9a28-4c0a-9d56-f7f34950b384 req-e790ea65-bb60-434e-a81b-a831cf131d27 service nova] [instance: a8af7330-6454-439c-870b-73d1637b6438] Received event network-changed-d1a6a81d-1664-401f-ae68-915994245700 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 595.623814] env[62585]: DEBUG nova.compute.manager [req-6fe93cbc-9a28-4c0a-9d56-f7f34950b384 req-e790ea65-bb60-434e-a81b-a831cf131d27 service nova] [instance: a8af7330-6454-439c-870b-73d1637b6438] Refreshing instance network info cache due to event network-changed-d1a6a81d-1664-401f-ae68-915994245700. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 595.624027] env[62585]: DEBUG oslo_concurrency.lockutils [req-6fe93cbc-9a28-4c0a-9d56-f7f34950b384 req-e790ea65-bb60-434e-a81b-a831cf131d27 service nova] Acquiring lock "refresh_cache-a8af7330-6454-439c-870b-73d1637b6438" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.722650] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9d270592-1b20-47bb-8fba-19396800695b tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.299837] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.051s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.303175] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Releasing lock "refresh_cache-01432003-5c48-40e1-b22b-a538a7e34663" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.303334] env[62585]: DEBUG nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 596.303531] env[62585]: DEBUG nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 596.303712] env[62585]: DEBUG nova.network.neutron [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 596.306451] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "refresh_cache-a8af7330-6454-439c-870b-73d1637b6438" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.306451] env[62585]: DEBUG nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 596.306451] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 596.306983] env[62585]: DEBUG oslo_concurrency.lockutils [req-6fe93cbc-9a28-4c0a-9d56-f7f34950b384 req-e790ea65-bb60-434e-a81b-a831cf131d27 service nova] Acquired lock "refresh_cache-a8af7330-6454-439c-870b-73d1637b6438" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.307164] env[62585]: DEBUG nova.network.neutron [req-6fe93cbc-9a28-4c0a-9d56-f7f34950b384 req-e790ea65-bb60-434e-a81b-a831cf131d27 service nova] [instance: a8af7330-6454-439c-870b-73d1637b6438] Refreshing network info cache for port d1a6a81d-1664-401f-ae68-915994245700 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 596.307977] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b3d8ee6f-66ba-4040-95c8-5c1ef293fdce {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.316884] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff28efdf-0276-40d3-ab9a-348f427ec9be {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.328294] env[62585]: DEBUG nova.network.neutron [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.341821] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a8af7330-6454-439c-870b-73d1637b6438 could not be found. [ 596.342041] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 596.342218] env[62585]: INFO nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Took 0.04 seconds to destroy the instance on the hypervisor. [ 596.342443] env[62585]: DEBUG oslo.service.loopingcall [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 596.342913] env[62585]: DEBUG nova.compute.manager [-] [instance: a8af7330-6454-439c-870b-73d1637b6438] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 596.343029] env[62585]: DEBUG nova.network.neutron [-] [instance: a8af7330-6454-439c-870b-73d1637b6438] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 596.363522] env[62585]: DEBUG nova.network.neutron [-] [instance: a8af7330-6454-439c-870b-73d1637b6438] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.826253] env[62585]: DEBUG nova.network.neutron [req-6fe93cbc-9a28-4c0a-9d56-f7f34950b384 req-e790ea65-bb60-434e-a81b-a831cf131d27 service nova] [instance: a8af7330-6454-439c-870b-73d1637b6438] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.831036] env[62585]: DEBUG nova.network.neutron [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.868891] env[62585]: DEBUG nova.network.neutron [-] [instance: a8af7330-6454-439c-870b-73d1637b6438] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.911180] env[62585]: DEBUG nova.network.neutron [req-6fe93cbc-9a28-4c0a-9d56-f7f34950b384 req-e790ea65-bb60-434e-a81b-a831cf131d27 service nova] [instance: a8af7330-6454-439c-870b-73d1637b6438] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.146204] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2902428c-eb5e-4b45-9eb5-8d02b5dc59d0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.153570] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a140b03-6097-4514-b4b9-1d7bd202337f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.185150] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db7ff3d-03c3-49bf-a8d3-525308d7aaaf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.194332] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2900e184-3783-4042-a05d-9bc9688cc7c9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.207930] env[62585]: DEBUG nova.compute.provider_tree [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.336831] env[62585]: INFO nova.compute.manager [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] [instance: 01432003-5c48-40e1-b22b-a538a7e34663] Took 1.03 seconds to deallocate network for instance. [ 597.371025] env[62585]: INFO nova.compute.manager [-] [instance: a8af7330-6454-439c-870b-73d1637b6438] Took 1.03 seconds to deallocate network for instance. 
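Every build failure in this excerpt terminates in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which raises the nova.exception.PortBindingFailed seen in the tracebacks. The log shows the function name and the exception it raises, but not the condition it checks. Below is a minimal standalone sketch of that check, not the Nova source itself; the use of the Neutron port attribute binding:vif_type and the sentinel value 'binding_failed' is an assumption inferred from how Neutron reports failed bindings, and is not confirmed by this log.

    # Editor's sketch (not the Nova source): reproduces the style of check that
    # raises the PortBindingFailed errors shown in the tracebacks above.
    # The 'binding:vif_type' == 'binding_failed' condition is an assumption.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)
            self.port_id = port_id

    def _ensure_no_port_binding_failure(port):
        # If Neutron could not bind the port, surface that to the compute
        # manager as an exception instead of continuing the VM build.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example: a port dict shaped like the one referenced in the log above.
    port = {'id': 'd1a6a81d-1664-401f-ae68-915994245700',
            'binding:vif_type': 'binding_failed'}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)

When this exception propagates out of _build_and_run_instance, the compute manager logs the "Failed to build and run instance" error, aborts the resource claim, deallocates networking, and re-schedules the build, which is exactly the sequence repeated for each instance in the entries that follow.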
[ 597.373239] env[62585]: DEBUG nova.compute.claims [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 597.373452] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.416202] env[62585]: DEBUG oslo_concurrency.lockutils [req-6fe93cbc-9a28-4c0a-9d56-f7f34950b384 req-e790ea65-bb60-434e-a81b-a831cf131d27 service nova] Releasing lock "refresh_cache-a8af7330-6454-439c-870b-73d1637b6438" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.416460] env[62585]: DEBUG nova.compute.manager [req-6fe93cbc-9a28-4c0a-9d56-f7f34950b384 req-e790ea65-bb60-434e-a81b-a831cf131d27 service nova] [instance: a8af7330-6454-439c-870b-73d1637b6438] Received event network-vif-deleted-d1a6a81d-1664-401f-ae68-915994245700 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 597.711786] env[62585]: DEBUG nova.scheduler.client.report [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 598.216887] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.917s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.217532] env[62585]: ERROR nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 43ce9572-81e6-47d4-9fc6-74bcf320e382, please check neutron logs for more information. 
[ 598.217532] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Traceback (most recent call last): [ 598.217532] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 598.217532] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] self.driver.spawn(context, instance, image_meta, [ 598.217532] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 598.217532] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 598.217532] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 598.217532] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] vm_ref = self.build_virtual_machine(instance, [ 598.217532] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 598.217532] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] vif_infos = vmwarevif.get_vif_info(self._session, [ 598.217532] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 598.217888] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] for vif in network_info: [ 598.217888] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 598.217888] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] return self._sync_wrapper(fn, *args, **kwargs) [ 598.217888] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 598.217888] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] self.wait() [ 598.217888] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 598.217888] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] self[:] = self._gt.wait() [ 598.217888] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 598.217888] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] return self._exit_event.wait() [ 598.217888] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 598.217888] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] result = hub.switch() [ 598.217888] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
598.217888] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] return self.greenlet.switch() [ 598.218299] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 598.218299] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] result = function(*args, **kwargs) [ 598.218299] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 598.218299] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] return func(*args, **kwargs) [ 598.218299] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 598.218299] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] raise e [ 598.218299] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 598.218299] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] nwinfo = self.network_api.allocate_for_instance( [ 598.218299] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 598.218299] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] created_port_ids = self._update_ports_for_instance( [ 598.218299] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 598.218299] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] with excutils.save_and_reraise_exception(): [ 598.218299] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 598.218686] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] self.force_reraise() [ 598.218686] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 598.218686] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] raise self.value [ 598.218686] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 598.218686] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] updated_port = self._update_port( [ 598.218686] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 598.218686] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] _ensure_no_port_binding_failure(port) [ 598.218686] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 598.218686] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] raise exception.PortBindingFailed(port_id=port['id']) [ 598.218686] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] nova.exception.PortBindingFailed: Binding failed for port 43ce9572-81e6-47d4-9fc6-74bcf320e382, please check neutron logs for more information. [ 598.218686] env[62585]: ERROR nova.compute.manager [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] [ 598.219048] env[62585]: DEBUG nova.compute.utils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Binding failed for port 43ce9572-81e6-47d4-9fc6-74bcf320e382, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 598.219805] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.579s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.223488] env[62585]: DEBUG nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Build of instance 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7 was re-scheduled: Binding failed for port 43ce9572-81e6-47d4-9fc6-74bcf320e382, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 598.223488] env[62585]: DEBUG nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 598.223488] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Acquiring lock "refresh_cache-84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.223653] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Acquired lock "refresh_cache-84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.223681] env[62585]: DEBUG nova.network.neutron [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 598.369745] env[62585]: INFO nova.scheduler.client.report [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Deleted allocations for instance 01432003-5c48-40e1-b22b-a538a7e34663 [ 598.744163] env[62585]: DEBUG nova.network.neutron [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 598.805108] env[62585]: DEBUG nova.network.neutron [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.877496] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a914de-b8e3-443b-98e8-cfedc38cc324 tempest-ServersWithSpecificFlavorTestJSON-1841125857 tempest-ServersWithSpecificFlavorTestJSON-1841125857-project-member] Lock "01432003-5c48-40e1-b22b-a538a7e34663" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.927s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.058634] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d6cb3f-8e1a-461f-b2b3-9049ac471ca4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.067114] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05956121-f621-4f9b-b7b7-1309d12c8d8b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.095406] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b2a87d-e8bc-4630-baa9-9432dec7846f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.102355] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288bd4a6-a1e9-4c68-8ffe-ae6fc1e901f6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.114829] env[62585]: DEBUG nova.compute.provider_tree [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.308518] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Releasing lock "refresh_cache-84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.308743] env[62585]: DEBUG nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 599.308954] env[62585]: DEBUG nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 599.309088] env[62585]: DEBUG nova.network.neutron [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 599.323673] env[62585]: DEBUG nova.network.neutron [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 599.380497] env[62585]: DEBUG nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 599.618456] env[62585]: DEBUG nova.scheduler.client.report [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 599.826656] env[62585]: DEBUG nova.network.neutron [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.902879] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.125323] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.905s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.126367] env[62585]: ERROR nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port aa786168-9bf7-46ad-957b-c6a634d81e7f, please check neutron logs for more information. [ 600.126367] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Traceback (most recent call last): [ 600.126367] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 600.126367] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] self.driver.spawn(context, instance, image_meta, [ 600.126367] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 600.126367] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 600.126367] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 600.126367] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] vm_ref = self.build_virtual_machine(instance, [ 600.126367] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 600.126367] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] vif_infos = vmwarevif.get_vif_info(self._session, [ 600.126367] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 600.126800] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] for vif in network_info: [ 600.126800] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 600.126800] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] return self._sync_wrapper(fn, *args, **kwargs) [ 600.126800] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 600.126800] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] self.wait() [ 600.126800] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 600.126800] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] self[:] = self._gt.wait() [ 600.126800] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 600.126800] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] return self._exit_event.wait() [ 
600.126800] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 600.126800] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] result = hub.switch() [ 600.126800] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 600.126800] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] return self.greenlet.switch() [ 600.127149] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 600.127149] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] result = function(*args, **kwargs) [ 600.127149] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 600.127149] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] return func(*args, **kwargs) [ 600.127149] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 600.127149] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] raise e [ 600.127149] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 600.127149] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] nwinfo = self.network_api.allocate_for_instance( [ 600.127149] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 600.127149] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] created_port_ids = self._update_ports_for_instance( [ 600.127149] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 600.127149] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] with excutils.save_and_reraise_exception(): [ 600.127149] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.127549] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] self.force_reraise() [ 600.127549] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.127549] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] raise self.value [ 600.127549] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 600.127549] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] 
updated_port = self._update_port( [ 600.127549] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.127549] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] _ensure_no_port_binding_failure(port) [ 600.127549] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 600.127549] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] raise exception.PortBindingFailed(port_id=port['id']) [ 600.127549] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] nova.exception.PortBindingFailed: Binding failed for port aa786168-9bf7-46ad-957b-c6a634d81e7f, please check neutron logs for more information. [ 600.127549] env[62585]: ERROR nova.compute.manager [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] [ 600.128167] env[62585]: DEBUG nova.compute.utils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Binding failed for port aa786168-9bf7-46ad-957b-c6a634d81e7f, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 600.130054] env[62585]: DEBUG oslo_concurrency.lockutils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.699s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.137468] env[62585]: DEBUG nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Build of instance 971d6e19-044b-4af8-b6c3-12b617cc24fe was re-scheduled: Binding failed for port aa786168-9bf7-46ad-957b-c6a634d81e7f, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 600.137468] env[62585]: DEBUG nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 600.137468] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Acquiring lock "refresh_cache-971d6e19-044b-4af8-b6c3-12b617cc24fe" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.137468] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Acquired lock "refresh_cache-971d6e19-044b-4af8-b6c3-12b617cc24fe" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.139023] env[62585]: DEBUG nova.network.neutron [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 600.329539] env[62585]: INFO nova.compute.manager [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] [instance: 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7] Took 1.02 seconds to deallocate network for instance. [ 600.670177] env[62585]: DEBUG nova.network.neutron [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.783684] env[62585]: DEBUG nova.network.neutron [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.117778] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ce8b01-c1c5-494a-9a56-0d4735b507e5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.126634] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a1a382-4ce6-4a1a-bed2-8b57dea72f06 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.170835] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe5f80e-2c01-4207-9b4d-8106a5851e88 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.181322] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af8a6e2-e163-41c0-b009-ccf65d4b56e0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.196087] env[62585]: DEBUG nova.compute.provider_tree [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.285370] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Releasing lock "refresh_cache-971d6e19-044b-4af8-b6c3-12b617cc24fe" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.285660] env[62585]: DEBUG nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 601.285857] env[62585]: DEBUG nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 601.286036] env[62585]: DEBUG nova.network.neutron [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 601.301418] env[62585]: DEBUG nova.network.neutron [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.375909] env[62585]: INFO nova.scheduler.client.report [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Deleted allocations for instance 84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7 [ 601.699540] env[62585]: DEBUG nova.scheduler.client.report [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 601.804920] env[62585]: DEBUG nova.network.neutron [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.884780] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e18a6acf-4190-4198-95fd-3fd60fc974ff tempest-ServerExternalEventsTest-520232516 tempest-ServerExternalEventsTest-520232516-project-member] Lock "84e40295-3fa4-4e6f-ade6-bf6f4c60d6a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.775s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.203808] env[62585]: DEBUG oslo_concurrency.lockutils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.074s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.204588] env[62585]: ERROR nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0c82d411-d593-4a34-a6dd-f5f0281b5d53, please check neutron logs for more information. [ 602.204588] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Traceback (most recent call last): [ 602.204588] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 602.204588] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] self.driver.spawn(context, instance, image_meta, [ 602.204588] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 602.204588] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] self._vmops.spawn(context, instance, image_meta, injected_files, [ 602.204588] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 602.204588] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] vm_ref = self.build_virtual_machine(instance, [ 602.204588] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 602.204588] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] vif_infos = vmwarevif.get_vif_info(self._session, [ 602.204588] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 602.204974] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] for vif in network_info: [ 602.204974] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 602.204974] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] return self._sync_wrapper(fn, *args, **kwargs) [ 602.204974] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 602.204974] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] self.wait() [ 602.204974] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 602.204974] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] self[:] = self._gt.wait() [ 602.204974] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 602.204974] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] return self._exit_event.wait() 
[ 602.204974] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 602.204974] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] result = hub.switch() [ 602.204974] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 602.204974] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] return self.greenlet.switch() [ 602.205294] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 602.205294] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] result = function(*args, **kwargs) [ 602.205294] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 602.205294] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] return func(*args, **kwargs) [ 602.205294] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 602.205294] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] raise e [ 602.205294] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.205294] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] nwinfo = self.network_api.allocate_for_instance( [ 602.205294] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 602.205294] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] created_port_ids = self._update_ports_for_instance( [ 602.205294] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 602.205294] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] with excutils.save_and_reraise_exception(): [ 602.205294] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.205715] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] self.force_reraise() [ 602.205715] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.205715] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] raise self.value [ 602.205715] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 602.205715] env[62585]: ERROR nova.compute.manager [instance: 
68b4ca9d-f934-4b44-8c34-0b1bfb848672] updated_port = self._update_port( [ 602.205715] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.205715] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] _ensure_no_port_binding_failure(port) [ 602.205715] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.205715] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] raise exception.PortBindingFailed(port_id=port['id']) [ 602.205715] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] nova.exception.PortBindingFailed: Binding failed for port 0c82d411-d593-4a34-a6dd-f5f0281b5d53, please check neutron logs for more information. [ 602.205715] env[62585]: ERROR nova.compute.manager [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] [ 602.206583] env[62585]: DEBUG nova.compute.utils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Binding failed for port 0c82d411-d593-4a34-a6dd-f5f0281b5d53, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 602.207012] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.585s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.207236] env[62585]: DEBUG nova.objects.instance [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62585) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 602.215672] env[62585]: DEBUG nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Build of instance 68b4ca9d-f934-4b44-8c34-0b1bfb848672 was re-scheduled: Binding failed for port 0c82d411-d593-4a34-a6dd-f5f0281b5d53, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 602.216263] env[62585]: DEBUG nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 602.216502] env[62585]: DEBUG oslo_concurrency.lockutils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Acquiring lock "refresh_cache-68b4ca9d-f934-4b44-8c34-0b1bfb848672" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.216803] env[62585]: DEBUG oslo_concurrency.lockutils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Acquired lock "refresh_cache-68b4ca9d-f934-4b44-8c34-0b1bfb848672" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.217159] env[62585]: DEBUG nova.network.neutron [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 602.309360] env[62585]: INFO nova.compute.manager [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] [instance: 971d6e19-044b-4af8-b6c3-12b617cc24fe] Took 1.02 seconds to deallocate network for instance. [ 602.389978] env[62585]: DEBUG nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 602.746639] env[62585]: DEBUG nova.network.neutron [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 602.897318] env[62585]: DEBUG nova.network.neutron [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.915006] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.221376] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90ba82c7-0dfc-41c1-a027-242d41249cfc tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.222125] env[62585]: DEBUG oslo_concurrency.lockutils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.492s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.223859] env[62585]: INFO nova.compute.claims [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.352352] env[62585]: INFO nova.scheduler.client.report [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Deleted allocations for instance 971d6e19-044b-4af8-b6c3-12b617cc24fe [ 603.402641] env[62585]: DEBUG oslo_concurrency.lockutils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Releasing lock "refresh_cache-68b4ca9d-f934-4b44-8c34-0b1bfb848672" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.402882] env[62585]: DEBUG nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 603.403084] env[62585]: DEBUG nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 603.403262] env[62585]: DEBUG nova.network.neutron [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 603.430090] env[62585]: DEBUG nova.network.neutron [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.863631] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f3d4602c-4ce1-43a8-8fae-0841734098b9 tempest-VolumesAssistedSnapshotsTest-1678902741 tempest-VolumesAssistedSnapshotsTest-1678902741-project-member] Lock "971d6e19-044b-4af8-b6c3-12b617cc24fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.808s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.932992] env[62585]: DEBUG nova.network.neutron [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.366142] env[62585]: DEBUG nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 604.435858] env[62585]: INFO nova.compute.manager [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] [instance: 68b4ca9d-f934-4b44-8c34-0b1bfb848672] Took 1.03 seconds to deallocate network for instance. 
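Note: the traceback earlier in this section ends in nova's _ensure_no_port_binding_failure raising PortBindingFailed for port 0c82d411-d593-4a34-a6dd-f5f0281b5d53, after which the build is re-scheduled and the network deallocated, as logged above. A minimal sketch of that check follows; the log only confirms the function name and the exception it raises, so the 'binding:vif_type' test is an assumption, not nova's exact code.

    # Illustrative sketch only: the traceback confirms that
    # _ensure_no_port_binding_failure(port) raises
    # PortBindingFailed(port_id=port['id']); the binding:vif_type check is an
    # assumption about how a failed neutron binding is detected.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example: a port whose binding failed aborts the build, as in the log.
    # _ensure_no_port_binding_failure(
    #     {'id': '0c82d411-d593-4a34-a6dd-f5f0281b5d53',
    #      'binding:vif_type': 'binding_failed'})
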
[ 604.580008] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109566e1-9b30-488d-8211-a2f23310c3e3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.588032] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0179547-966a-4c2a-867a-c09906a3ef62 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.617510] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036d4b55-bec1-48ff-afa5-ea55ee64eb99 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.625665] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5cf37b-8b80-474b-99b0-e1179f2f4a3d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.638680] env[62585]: DEBUG nova.compute.provider_tree [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.891853] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.143776] env[62585]: DEBUG nova.scheduler.client.report [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 605.355681] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.355681] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.470468] env[62585]: INFO nova.scheduler.client.report [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Deleted allocations for instance 68b4ca9d-f934-4b44-8c34-0b1bfb848672 
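Note: the "Inventory has not changed" report above lists total, reserved and allocation_ratio per resource class for provider 66db9ec1-b5c3-45d2-a885-8e338110656b. The sketch below shows how that data maps to schedulable capacity, assuming placement's usual formula (total - reserved) * allocation_ratio; the formula is an assumption here, not something stated in the log.

    # Sketch, assuming usable capacity = (total - reserved) * allocation_ratio
    # per resource class, using the values logged above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
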
[ 605.657737] env[62585]: DEBUG oslo_concurrency.lockutils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.658294] env[62585]: DEBUG nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 605.660813] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 20.458s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.660996] env[62585]: DEBUG nova.objects.instance [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62585) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 605.862525] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.862525] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Starting heal instance info cache {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 605.862525] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Rebuilding the list of instances to heal {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 605.982337] env[62585]: DEBUG oslo_concurrency.lockutils [None req-99ae50cb-229b-45a3-8a17-4d34df17045d tempest-ServerMetadataNegativeTestJSON-1069101450 tempest-ServerMetadataNegativeTestJSON-1069101450-project-member] Lock "68b4ca9d-f934-4b44-8c34-0b1bfb848672" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.245s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.169024] env[62585]: DEBUG nova.compute.utils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 606.172785] env[62585]: DEBUG nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 606.173101] env[62585]: DEBUG nova.network.neutron [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 606.241488] env[62585]: DEBUG nova.policy [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6e238fbd13844778d829bbbd5564560', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7767e044b60d4e0c8b04051967ec97d4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 606.367580] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 606.367750] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 606.367877] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 606.368009] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 92080abc-eb47-439b-b702-d226666fa155] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 606.368605] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: a8af7330-6454-439c-870b-73d1637b6438] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 606.368947] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Skipping network cache update for instance because it is Building. 
{{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 606.416286] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "refresh_cache-dd387320-7101-440c-80bc-a7d19a654df8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.416999] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquired lock "refresh_cache-dd387320-7101-440c-80bc-a7d19a654df8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.416999] env[62585]: DEBUG nova.network.neutron [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Forcefully refreshing network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 606.416999] env[62585]: DEBUG nova.objects.instance [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lazy-loading 'info_cache' on Instance uuid dd387320-7101-440c-80bc-a7d19a654df8 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 606.491649] env[62585]: DEBUG nova.compute.manager [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 606.562626] env[62585]: DEBUG nova.network.neutron [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Successfully created port: 36005db7-d1cf-4fde-9210-7ed3242f300a {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 606.672981] env[62585]: DEBUG nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 606.680961] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6fc029dd-9967-47a9-bc96-bddc6b749108 tempest-ServersAdmin275Test-1432674827 tempest-ServersAdmin275Test-1432674827-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.681391] env[62585]: DEBUG oslo_concurrency.lockutils [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.687s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.681641] env[62585]: DEBUG nova.objects.instance [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Lazy-loading 'resources' on Instance uuid 149bd77b-9583-42e5-8c82-f795cac53b87 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 607.014610] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.472508] env[62585]: DEBUG nova.network.neutron [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.590908] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5817527-deb9-4ad0-8aa0-fda8aa9078ed {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.599087] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af696c5-901d-4d41-a75e-2c3e221660fe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.635277] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625299b8-1ef6-483e-a08b-1f507425f0c1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.644592] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8abaceb-cb6c-4084-9c45-5d52aa32645b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.658695] env[62585]: DEBUG nova.compute.provider_tree [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.685607] env[62585]: DEBUG nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 607.731018] env[62585]: DEBUG nova.virt.hardware [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 607.731018] env[62585]: DEBUG nova.virt.hardware [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 607.731018] env[62585]: DEBUG nova.virt.hardware [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 607.731268] env[62585]: DEBUG nova.virt.hardware [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 607.731268] env[62585]: DEBUG nova.virt.hardware [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 607.731268] env[62585]: DEBUG nova.virt.hardware [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 607.731268] env[62585]: DEBUG nova.virt.hardware [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 607.731268] env[62585]: DEBUG nova.virt.hardware [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 607.731442] env[62585]: DEBUG nova.virt.hardware [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 607.731442] env[62585]: DEBUG nova.virt.hardware [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 607.731442] env[62585]: DEBUG nova.virt.hardware [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 607.731442] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d48f55-9d1a-45c9-9e93-11708715d2ab {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.744243] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822af87d-9c9e-4c1e-aee0-7584c6a659c9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.826214] env[62585]: DEBUG nova.compute.manager [req-edd4b532-8bfb-4892-8bd7-0c8ef8845f15 req-614a18fb-facd-41f5-8c10-5d03c3f505fc service nova] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Received event network-changed-36005db7-d1cf-4fde-9210-7ed3242f300a {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 607.826405] env[62585]: DEBUG nova.compute.manager [req-edd4b532-8bfb-4892-8bd7-0c8ef8845f15 req-614a18fb-facd-41f5-8c10-5d03c3f505fc service nova] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Refreshing instance network info cache due to event network-changed-36005db7-d1cf-4fde-9210-7ed3242f300a. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 607.826619] env[62585]: DEBUG oslo_concurrency.lockutils [req-edd4b532-8bfb-4892-8bd7-0c8ef8845f15 req-614a18fb-facd-41f5-8c10-5d03c3f505fc service nova] Acquiring lock "refresh_cache-29f9e25a-a0b2-4bb8-b59a-3617819d3be5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.826761] env[62585]: DEBUG oslo_concurrency.lockutils [req-edd4b532-8bfb-4892-8bd7-0c8ef8845f15 req-614a18fb-facd-41f5-8c10-5d03c3f505fc service nova] Acquired lock "refresh_cache-29f9e25a-a0b2-4bb8-b59a-3617819d3be5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.826933] env[62585]: DEBUG nova.network.neutron [req-edd4b532-8bfb-4892-8bd7-0c8ef8845f15 req-614a18fb-facd-41f5-8c10-5d03c3f505fc service nova] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Refreshing network info cache for port 36005db7-d1cf-4fde-9210-7ed3242f300a {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 608.094010] env[62585]: ERROR nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 36005db7-d1cf-4fde-9210-7ed3242f300a, please check neutron logs for more information. [ 608.094010] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 608.094010] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.094010] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 608.094010] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 608.094010] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 608.094010] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 608.094010] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 608.094010] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.094010] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 608.094010] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.094010] env[62585]: ERROR nova.compute.manager raise self.value [ 608.094010] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 608.094010] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 608.094010] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.094010] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 608.094641] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.094641] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 608.094641] env[62585]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 36005db7-d1cf-4fde-9210-7ed3242f300a, please check neutron logs for more information. [ 608.094641] env[62585]: ERROR nova.compute.manager [ 608.094641] env[62585]: Traceback (most recent call last): [ 608.094641] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 608.094641] env[62585]: listener.cb(fileno) [ 608.094641] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 608.094641] env[62585]: result = function(*args, **kwargs) [ 608.094641] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 608.094641] env[62585]: return func(*args, **kwargs) [ 608.094641] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 608.094641] env[62585]: raise e [ 608.094641] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.094641] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 608.094641] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 608.094641] env[62585]: created_port_ids = self._update_ports_for_instance( [ 608.094641] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 608.094641] env[62585]: with excutils.save_and_reraise_exception(): [ 608.094641] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.094641] env[62585]: self.force_reraise() [ 608.094641] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.094641] env[62585]: raise self.value [ 608.094641] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 608.094641] env[62585]: updated_port = self._update_port( [ 608.094641] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.094641] env[62585]: _ensure_no_port_binding_failure(port) [ 608.094641] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.094641] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 608.095317] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 36005db7-d1cf-4fde-9210-7ed3242f300a, please check neutron logs for more information. [ 608.095317] env[62585]: Removing descriptor: 15 [ 608.095532] env[62585]: ERROR nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 36005db7-d1cf-4fde-9210-7ed3242f300a, please check neutron logs for more information. 
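Note: the traceback above shows the failure propagating through oslo_utils' excutils.save_and_reraise_exception (the force_reraise / raise self.value frames). A minimal sketch of that pattern follows; update_port and cleanup are hypothetical stand-ins, not nova internals.

    # Sketch of the save_and_reraise_exception pattern seen in the traceback.
    from oslo_utils import excutils

    def update_port(port_id):
        # Hypothetical helper that fails, standing in for the neutron call.
        raise RuntimeError("binding failed for %s" % port_id)

    def cleanup(created):
        print("rolling back ports: %s" % created)

    def update_ports(port_ids):
        created = []
        for port_id in port_ids:
            try:
                created.append(update_port(port_id))
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Cleanup runs while the original exception is preserved;
                    # it is re-raised when the context manager exits, which is
                    # where force_reraise()/raise self.value appear above.
                    cleanup(created)

    # update_ports(['36005db7-d1cf-4fde-9210-7ed3242f300a'])  # re-raises RuntimeError
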
[ 608.095532] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Traceback (most recent call last): [ 608.095532] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 608.095532] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] yield resources [ 608.095532] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 608.095532] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] self.driver.spawn(context, instance, image_meta, [ 608.095532] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 608.095532] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 608.095532] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 608.095532] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] vm_ref = self.build_virtual_machine(instance, [ 608.095532] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 608.095857] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] vif_infos = vmwarevif.get_vif_info(self._session, [ 608.095857] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 608.095857] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] for vif in network_info: [ 608.095857] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 608.095857] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] return self._sync_wrapper(fn, *args, **kwargs) [ 608.095857] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 608.095857] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] self.wait() [ 608.095857] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 608.095857] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] self[:] = self._gt.wait() [ 608.095857] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 608.095857] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] return self._exit_event.wait() [ 608.095857] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 608.095857] env[62585]: ERROR 
nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] result = hub.switch() [ 608.096196] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 608.096196] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] return self.greenlet.switch() [ 608.096196] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 608.096196] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] result = function(*args, **kwargs) [ 608.096196] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 608.096196] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] return func(*args, **kwargs) [ 608.096196] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 608.096196] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] raise e [ 608.096196] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.096196] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] nwinfo = self.network_api.allocate_for_instance( [ 608.096196] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 608.096196] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] created_port_ids = self._update_ports_for_instance( [ 608.096196] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 608.096501] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] with excutils.save_and_reraise_exception(): [ 608.096501] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.096501] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] self.force_reraise() [ 608.096501] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.096501] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] raise self.value [ 608.096501] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 608.096501] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] updated_port = self._update_port( [ 608.096501] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.096501] 
env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] _ensure_no_port_binding_failure(port) [ 608.096501] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.096501] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] raise exception.PortBindingFailed(port_id=port['id']) [ 608.096501] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] nova.exception.PortBindingFailed: Binding failed for port 36005db7-d1cf-4fde-9210-7ed3242f300a, please check neutron logs for more information. [ 608.096501] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] [ 608.096836] env[62585]: INFO nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Terminating instance [ 608.099261] env[62585]: DEBUG oslo_concurrency.lockutils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Acquiring lock "refresh_cache-29f9e25a-a0b2-4bb8-b59a-3617819d3be5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.122559] env[62585]: DEBUG nova.network.neutron [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.163388] env[62585]: DEBUG nova.scheduler.client.report [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 608.353644] env[62585]: DEBUG nova.network.neutron [req-edd4b532-8bfb-4892-8bd7-0c8ef8845f15 req-614a18fb-facd-41f5-8c10-5d03c3f505fc service nova] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.439911] env[62585]: DEBUG nova.network.neutron [req-edd4b532-8bfb-4892-8bd7-0c8ef8845f15 req-614a18fb-facd-41f5-8c10-5d03c3f505fc service nova] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.629205] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Releasing lock "refresh_cache-dd387320-7101-440c-80bc-a7d19a654df8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.629205] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Updated the network info_cache for instance {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 608.629205] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 608.629384] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 608.630195] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 608.630195] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 608.630195] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 608.630195] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 608.630195] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62585) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 608.630195] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 608.673023] env[62585]: DEBUG oslo_concurrency.lockutils [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.989s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 608.673023] env[62585]: DEBUG oslo_concurrency.lockutils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.733s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.710654] env[62585]: INFO nova.scheduler.client.report [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Deleted allocations for instance 149bd77b-9583-42e5-8c82-f795cac53b87 [ 608.949753] env[62585]: DEBUG oslo_concurrency.lockutils [req-edd4b532-8bfb-4892-8bd7-0c8ef8845f15 req-614a18fb-facd-41f5-8c10-5d03c3f505fc service nova] Releasing lock "refresh_cache-29f9e25a-a0b2-4bb8-b59a-3617819d3be5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.949753] env[62585]: DEBUG oslo_concurrency.lockutils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Acquired lock "refresh_cache-29f9e25a-a0b2-4bb8-b59a-3617819d3be5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.949753] env[62585]: DEBUG nova.network.neutron [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 609.134029] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.223139] env[62585]: DEBUG oslo_concurrency.lockutils [None req-71d597f1-8ddf-4ce4-9104-41e52ebcc3ac tempest-ServerShowV257Test-1218433941 tempest-ServerShowV257Test-1218433941-project-member] Lock "149bd77b-9583-42e5-8c82-f795cac53b87" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.987s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.484907] env[62585]: DEBUG nova.network.neutron [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 
tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.607990] env[62585]: DEBUG nova.network.neutron [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.645936] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6598b9d5-9ec8-4983-8875-a8117e81df90 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.655188] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85305c05-ae30-46c2-a1ce-8fed2143abe2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.691144] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8420bf28-6183-49b6-871b-0e8030324d69 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.699986] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e376f79b-a49d-4ab5-a347-145db75166b1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.714095] env[62585]: DEBUG nova.compute.provider_tree [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.942869] env[62585]: DEBUG nova.compute.manager [req-e6bb39e2-8174-403b-96b4-ddd8fd86917d req-e40c3dfb-0c53-436e-8232-843d8fdae28c service nova] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Received event network-vif-deleted-36005db7-d1cf-4fde-9210-7ed3242f300a {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 610.113933] env[62585]: DEBUG oslo_concurrency.lockutils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Releasing lock "refresh_cache-29f9e25a-a0b2-4bb8-b59a-3617819d3be5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.114367] env[62585]: DEBUG nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 610.115254] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 610.115254] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d9177df-620c-4292-8876-bdb645a0692f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.127216] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f9e1ab-cf0e-4ed0-aa52-77c45e4afff3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.154772] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 29f9e25a-a0b2-4bb8-b59a-3617819d3be5 could not be found. [ 610.155106] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 610.155379] env[62585]: INFO nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 610.158589] env[62585]: DEBUG oslo.service.loopingcall [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 610.158589] env[62585]: DEBUG nova.compute.manager [-] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 610.158589] env[62585]: DEBUG nova.network.neutron [-] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 610.191636] env[62585]: DEBUG nova.network.neutron [-] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 610.218737] env[62585]: DEBUG nova.scheduler.client.report [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 610.695311] env[62585]: DEBUG nova.network.neutron [-] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.723512] env[62585]: DEBUG oslo_concurrency.lockutils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.051s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.724669] env[62585]: ERROR nova.compute.manager [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 343feb77-bd53-4d00-99d7-b144412773b8, please check neutron logs for more information. 
[ 610.724669] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Traceback (most recent call last): [ 610.724669] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 610.724669] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] self.driver.spawn(context, instance, image_meta, [ 610.724669] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 610.724669] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] self._vmops.spawn(context, instance, image_meta, injected_files, [ 610.724669] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 610.724669] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] vm_ref = self.build_virtual_machine(instance, [ 610.724669] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 610.724669] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] vif_infos = vmwarevif.get_vif_info(self._session, [ 610.724669] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 610.725130] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] for vif in network_info: [ 610.725130] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 610.725130] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] return self._sync_wrapper(fn, *args, **kwargs) [ 610.725130] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 610.725130] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] self.wait() [ 610.725130] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 610.725130] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] self[:] = self._gt.wait() [ 610.725130] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 610.725130] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] return self._exit_event.wait() [ 610.725130] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 610.725130] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] result = hub.switch() [ 610.725130] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
610.725130] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] return self.greenlet.switch() [ 610.725448] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 610.725448] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] result = function(*args, **kwargs) [ 610.725448] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 610.725448] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] return func(*args, **kwargs) [ 610.725448] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 610.725448] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] raise e [ 610.725448] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 610.725448] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] nwinfo = self.network_api.allocate_for_instance( [ 610.725448] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 610.725448] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] created_port_ids = self._update_ports_for_instance( [ 610.725448] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 610.725448] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] with excutils.save_and_reraise_exception(): [ 610.725448] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 610.725804] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] self.force_reraise() [ 610.725804] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 610.725804] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] raise self.value [ 610.725804] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 610.725804] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] updated_port = self._update_port( [ 610.725804] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 610.725804] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] _ensure_no_port_binding_failure(port) [ 610.725804] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 610.725804] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] raise exception.PortBindingFailed(port_id=port['id']) [ 610.725804] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] nova.exception.PortBindingFailed: Binding failed for port 343feb77-bd53-4d00-99d7-b144412773b8, please check neutron logs for more information. [ 610.725804] env[62585]: ERROR nova.compute.manager [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] [ 610.726129] env[62585]: DEBUG nova.compute.utils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Binding failed for port 343feb77-bd53-4d00-99d7-b144412773b8, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 610.730312] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.176s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.730552] env[62585]: DEBUG nova.objects.instance [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Lazy-loading 'resources' on Instance uuid dd387320-7101-440c-80bc-a7d19a654df8 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 610.737025] env[62585]: DEBUG nova.compute.manager [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Build of instance b7686890-0ee7-4c5e-85f5-90a5c5241950 was re-scheduled: Binding failed for port 343feb77-bd53-4d00-99d7-b144412773b8, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 610.737725] env[62585]: DEBUG nova.compute.manager [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 610.738130] env[62585]: DEBUG oslo_concurrency.lockutils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Acquiring lock "refresh_cache-b7686890-0ee7-4c5e-85f5-90a5c5241950" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.738301] env[62585]: DEBUG oslo_concurrency.lockutils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Acquired lock "refresh_cache-b7686890-0ee7-4c5e-85f5-90a5c5241950" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.738750] env[62585]: DEBUG nova.network.neutron [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 611.198386] env[62585]: INFO nova.compute.manager [-] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Took 1.04 seconds to deallocate network for instance. [ 611.200810] env[62585]: DEBUG nova.compute.claims [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 611.200983] env[62585]: DEBUG oslo_concurrency.lockutils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.267097] env[62585]: DEBUG nova.network.neutron [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.418790] env[62585]: DEBUG nova.network.neutron [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.681123] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a230af5-834c-4c4e-82e5-92b27345a38d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.692981] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b80cff-c60e-4a93-9a7d-896f69d56457 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.723695] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6f31ac-a219-4f91-bb7e-741d6c2e060a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.732086] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987799dc-de32-431f-ba58-4f350d9564ee {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.746100] env[62585]: DEBUG nova.compute.provider_tree [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.923561] env[62585]: DEBUG oslo_concurrency.lockutils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Releasing lock "refresh_cache-b7686890-0ee7-4c5e-85f5-90a5c5241950" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.923842] env[62585]: DEBUG nova.compute.manager [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 611.924047] env[62585]: DEBUG nova.compute.manager [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 611.924228] env[62585]: DEBUG nova.network.neutron [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 611.949940] env[62585]: DEBUG nova.network.neutron [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.251378] env[62585]: DEBUG nova.scheduler.client.report [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 612.451370] env[62585]: DEBUG nova.network.neutron [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.755452] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.028s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.758361] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.957s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.778684] env[62585]: INFO nova.scheduler.client.report [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Deleted allocations for instance dd387320-7101-440c-80bc-a7d19a654df8 [ 612.953760] env[62585]: INFO nova.compute.manager [None 
req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] [instance: b7686890-0ee7-4c5e-85f5-90a5c5241950] Took 1.03 seconds to deallocate network for instance. [ 613.289387] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f74ebdef-3f94-413d-821a-423aebec62c7 tempest-ServersAdmin275Test-515678000 tempest-ServersAdmin275Test-515678000-project-member] Lock "dd387320-7101-440c-80bc-a7d19a654df8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.476s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.616810] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f8c4dc-0a31-49fe-8ff5-b1037229a42c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.624217] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e924b075-2bc6-4eb8-9a91-6c5355fd9c16 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.653868] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148c34fb-a767-44ba-9c47-cc958b1157ab {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.661231] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da26009d-b5c5-4c52-b8fe-cdf41db96def {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.673970] env[62585]: DEBUG nova.compute.provider_tree [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 613.995181] env[62585]: INFO nova.scheduler.client.report [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Deleted allocations for instance b7686890-0ee7-4c5e-85f5-90a5c5241950 [ 614.177373] env[62585]: DEBUG nova.scheduler.client.report [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 614.505845] env[62585]: DEBUG oslo_concurrency.lockutils [None req-62b93870-045f-4285-8d1d-ad7877dac2ed tempest-ImagesNegativeTestJSON-2116603881 tempest-ImagesNegativeTestJSON-2116603881-project-member] Lock "b7686890-0ee7-4c5e-85f5-90a5c5241950" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.707s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.685258] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.927s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.685965] env[62585]: ERROR nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3ca9cf03-5a5a-4d88-beab-3e0e6badf163, please check neutron logs for more information. [ 614.685965] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Traceback (most recent call last): [ 614.685965] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 614.685965] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] self.driver.spawn(context, instance, image_meta, [ 614.685965] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 614.685965] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] self._vmops.spawn(context, instance, image_meta, injected_files, [ 614.685965] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 614.685965] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] vm_ref = self.build_virtual_machine(instance, [ 614.685965] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 614.685965] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] vif_infos = vmwarevif.get_vif_info(self._session, [ 614.685965] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 614.686281] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] for vif in network_info: [ 614.686281] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 614.686281] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] return self._sync_wrapper(fn, *args, **kwargs) [ 614.686281] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 614.686281] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] self.wait() [ 614.686281] 
env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 614.686281] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] self[:] = self._gt.wait() [ 614.686281] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 614.686281] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] return self._exit_event.wait() [ 614.686281] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 614.686281] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] result = hub.switch() [ 614.686281] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 614.686281] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] return self.greenlet.switch() [ 614.686622] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 614.686622] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] result = function(*args, **kwargs) [ 614.686622] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 614.686622] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] return func(*args, **kwargs) [ 614.686622] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 614.686622] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] raise e [ 614.686622] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 614.686622] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] nwinfo = self.network_api.allocate_for_instance( [ 614.686622] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 614.686622] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] created_port_ids = self._update_ports_for_instance( [ 614.686622] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 614.686622] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] with excutils.save_and_reraise_exception(): [ 614.686622] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 614.686942] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] self.force_reraise() [ 
614.686942] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 614.686942] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] raise self.value [ 614.686942] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 614.686942] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] updated_port = self._update_port( [ 614.686942] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 614.686942] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] _ensure_no_port_binding_failure(port) [ 614.686942] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 614.686942] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] raise exception.PortBindingFailed(port_id=port['id']) [ 614.686942] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] nova.exception.PortBindingFailed: Binding failed for port 3ca9cf03-5a5a-4d88-beab-3e0e6badf163, please check neutron logs for more information. [ 614.686942] env[62585]: ERROR nova.compute.manager [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] [ 614.687206] env[62585]: DEBUG nova.compute.utils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Binding failed for port 3ca9cf03-5a5a-4d88-beab-3e0e6badf163, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 614.688483] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.755s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.691690] env[62585]: DEBUG nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Build of instance 5177b4e2-e990-47e6-9f2b-156ca0ee8387 was re-scheduled: Binding failed for port 3ca9cf03-5a5a-4d88-beab-3e0e6badf163, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 614.692266] env[62585]: DEBUG nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 614.693119] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Acquiring lock "refresh_cache-5177b4e2-e990-47e6-9f2b-156ca0ee8387" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.693119] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Acquired lock "refresh_cache-5177b4e2-e990-47e6-9f2b-156ca0ee8387" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.693119] env[62585]: DEBUG nova.network.neutron [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 615.008683] env[62585]: DEBUG nova.compute.manager [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 615.225866] env[62585]: DEBUG nova.network.neutron [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 615.341588] env[62585]: DEBUG nova.network.neutron [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.532659] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.534773] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327421c9-2175-4585-aab5-3fe320ddba0a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.542665] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e5fc61-4485-4577-b675-0a72814aa6f5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.572254] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5598e7-c39f-4cf4-bcbd-d22a9aa7eda8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.579920] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e55b11e-cfeb-4273-b3b6-b29c47e7f103 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.594245] env[62585]: DEBUG nova.compute.provider_tree [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.844753] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Releasing lock "refresh_cache-5177b4e2-e990-47e6-9f2b-156ca0ee8387" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.845134] env[62585]: DEBUG nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 615.845330] env[62585]: DEBUG nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 615.845449] env[62585]: DEBUG nova.network.neutron [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 615.868378] env[62585]: DEBUG nova.network.neutron [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.097588] env[62585]: DEBUG nova.scheduler.client.report [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 616.373693] env[62585]: DEBUG nova.network.neutron [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.602618] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.914s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.603249] env[62585]: ERROR nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7bd7a5fd-69b2-4532-93c0-adedf2ca7e4d, please check neutron logs for more information. 
[ 616.603249] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Traceback (most recent call last): [ 616.603249] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 616.603249] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] self.driver.spawn(context, instance, image_meta, [ 616.603249] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 616.603249] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] self._vmops.spawn(context, instance, image_meta, injected_files, [ 616.603249] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 616.603249] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] vm_ref = self.build_virtual_machine(instance, [ 616.603249] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 616.603249] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] vif_infos = vmwarevif.get_vif_info(self._session, [ 616.603249] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 616.603821] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] for vif in network_info: [ 616.603821] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 616.603821] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] return self._sync_wrapper(fn, *args, **kwargs) [ 616.603821] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 616.603821] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] self.wait() [ 616.603821] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 616.603821] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] self[:] = self._gt.wait() [ 616.603821] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 616.603821] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] return self._exit_event.wait() [ 616.603821] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 616.603821] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] result = hub.switch() [ 616.603821] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
616.603821] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] return self.greenlet.switch() [ 616.604478] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 616.604478] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] result = function(*args, **kwargs) [ 616.604478] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 616.604478] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] return func(*args, **kwargs) [ 616.604478] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 616.604478] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] raise e [ 616.604478] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.604478] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] nwinfo = self.network_api.allocate_for_instance( [ 616.604478] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 616.604478] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] created_port_ids = self._update_ports_for_instance( [ 616.604478] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 616.604478] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] with excutils.save_and_reraise_exception(): [ 616.604478] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.605259] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] self.force_reraise() [ 616.605259] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.605259] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] raise self.value [ 616.605259] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 616.605259] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] updated_port = self._update_port( [ 616.605259] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.605259] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] _ensure_no_port_binding_failure(port) [ 616.605259] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 616.605259] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] raise exception.PortBindingFailed(port_id=port['id']) [ 616.605259] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] nova.exception.PortBindingFailed: Binding failed for port 7bd7a5fd-69b2-4532-93c0-adedf2ca7e4d, please check neutron logs for more information. [ 616.605259] env[62585]: ERROR nova.compute.manager [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] [ 616.605757] env[62585]: DEBUG nova.compute.utils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Binding failed for port 7bd7a5fd-69b2-4532-93c0-adedf2ca7e4d, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 616.606195] env[62585]: DEBUG oslo_concurrency.lockutils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.124s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.613468] env[62585]: DEBUG nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Build of instance 6f2ca381-b4a3-47ce-b135-dbceb7e44d24 was re-scheduled: Binding failed for port 7bd7a5fd-69b2-4532-93c0-adedf2ca7e4d, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 616.613919] env[62585]: DEBUG nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 616.614160] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Acquiring lock "refresh_cache-6f2ca381-b4a3-47ce-b135-dbceb7e44d24" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.614306] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Acquired lock "refresh_cache-6f2ca381-b4a3-47ce-b135-dbceb7e44d24" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.614461] env[62585]: DEBUG nova.network.neutron [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 616.878824] env[62585]: INFO nova.compute.manager [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] [instance: 5177b4e2-e990-47e6-9f2b-156ca0ee8387] Took 1.03 seconds to deallocate network for instance. [ 617.159057] env[62585]: DEBUG nova.network.neutron [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.366550] env[62585]: DEBUG nova.network.neutron [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.515017] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6a1682-1018-4cd2-a3dc-88c3074acdc6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.520318] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373d9320-ff7f-48aa-9319-c7dcc3a12c15 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.551149] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87cd885c-22cf-4e9e-9419-c3db6289e415 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.558832] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85f3374-f897-4d5c-914e-0bf89f0a89cf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.574116] env[62585]: DEBUG nova.compute.provider_tree [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.869645] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Releasing lock "refresh_cache-6f2ca381-b4a3-47ce-b135-dbceb7e44d24" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.869962] env[62585]: DEBUG nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 617.870203] env[62585]: DEBUG nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 617.870333] env[62585]: DEBUG nova.network.neutron [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 617.902288] env[62585]: DEBUG nova.network.neutron [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.927604] env[62585]: INFO nova.scheduler.client.report [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Deleted allocations for instance 5177b4e2-e990-47e6-9f2b-156ca0ee8387 [ 618.076712] env[62585]: DEBUG nova.scheduler.client.report [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 618.405586] env[62585]: DEBUG nova.network.neutron [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.441706] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3ed8d6ab-0a49-43b2-b97c-445bb2bfc22c tempest-FloatingIPsAssociationNegativeTestJSON-1602108693 tempest-FloatingIPsAssociationNegativeTestJSON-1602108693-project-member] Lock "5177b4e2-e990-47e6-9f2b-156ca0ee8387" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.945s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.587304] env[62585]: DEBUG oslo_concurrency.lockutils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.980s 
{{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.587304] env[62585]: ERROR nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 03242677-863c-408a-8d91-a5a2042277e5, please check neutron logs for more information. [ 618.587304] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] Traceback (most recent call last): [ 618.587304] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 618.587304] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] self.driver.spawn(context, instance, image_meta, [ 618.587304] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 618.587304] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] self._vmops.spawn(context, instance, image_meta, injected_files, [ 618.587304] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 618.587304] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] vm_ref = self.build_virtual_machine(instance, [ 618.587614] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 618.587614] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] vif_infos = vmwarevif.get_vif_info(self._session, [ 618.587614] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 618.587614] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] for vif in network_info: [ 618.587614] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 618.587614] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] return self._sync_wrapper(fn, *args, **kwargs) [ 618.587614] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 618.587614] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] self.wait() [ 618.587614] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 618.587614] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] self[:] = self._gt.wait() [ 618.587614] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 618.587614] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] return 
self._exit_event.wait() [ 618.587614] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 618.587910] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] current.throw(*self._exc) [ 618.587910] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.587910] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] result = function(*args, **kwargs) [ 618.587910] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 618.587910] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] return func(*args, **kwargs) [ 618.587910] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 618.587910] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] raise e [ 618.587910] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.587910] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] nwinfo = self.network_api.allocate_for_instance( [ 618.587910] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 618.587910] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] created_port_ids = self._update_ports_for_instance( [ 618.587910] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 618.587910] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] with excutils.save_and_reraise_exception(): [ 618.588322] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.588322] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] self.force_reraise() [ 618.588322] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.588322] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] raise self.value [ 618.588322] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 618.588322] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] updated_port = self._update_port( [ 618.588322] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.588322] env[62585]: ERROR nova.compute.manager [instance: 
92080abc-eb47-439b-b702-d226666fa155] _ensure_no_port_binding_failure(port) [ 618.588322] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.588322] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] raise exception.PortBindingFailed(port_id=port['id']) [ 618.588322] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] nova.exception.PortBindingFailed: Binding failed for port 03242677-863c-408a-8d91-a5a2042277e5, please check neutron logs for more information. [ 618.588322] env[62585]: ERROR nova.compute.manager [instance: 92080abc-eb47-439b-b702-d226666fa155] [ 618.588600] env[62585]: DEBUG nova.compute.utils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Binding failed for port 03242677-863c-408a-8d91-a5a2042277e5, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 618.588965] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.216s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.595980] env[62585]: DEBUG nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Build of instance 92080abc-eb47-439b-b702-d226666fa155 was re-scheduled: Binding failed for port 03242677-863c-408a-8d91-a5a2042277e5, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 618.596396] env[62585]: DEBUG nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 618.596621] env[62585]: DEBUG oslo_concurrency.lockutils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Acquiring lock "refresh_cache-92080abc-eb47-439b-b702-d226666fa155" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.596762] env[62585]: DEBUG oslo_concurrency.lockutils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Acquired lock "refresh_cache-92080abc-eb47-439b-b702-d226666fa155" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.596936] env[62585]: DEBUG nova.network.neutron [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 618.912778] env[62585]: INFO nova.compute.manager [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] [instance: 6f2ca381-b4a3-47ce-b135-dbceb7e44d24] Took 1.04 seconds to deallocate network for instance. [ 618.945463] env[62585]: DEBUG nova.compute.manager [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 619.128022] env[62585]: DEBUG nova.network.neutron [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 619.267237] env[62585]: DEBUG nova.network.neutron [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.480450] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.519347] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f8cfb0-49f2-4127-bca9-22ea6f596e2b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.527404] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd96727-b189-4e56-b503-e66d313c62a4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.560878] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd060dcf-b5b0-4bce-a662-703e540a2ab9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.569619] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4ab0ff-727f-430a-93b7-0dcb0bcf6c4f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.583643] env[62585]: DEBUG nova.compute.provider_tree [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.770686] env[62585]: DEBUG oslo_concurrency.lockutils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Releasing lock "refresh_cache-92080abc-eb47-439b-b702-d226666fa155" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.771260] env[62585]: DEBUG nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 619.771260] env[62585]: DEBUG nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 619.771514] env[62585]: DEBUG nova.network.neutron [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 619.829440] env[62585]: DEBUG nova.network.neutron [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 619.964851] env[62585]: INFO nova.scheduler.client.report [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Deleted allocations for instance 6f2ca381-b4a3-47ce-b135-dbceb7e44d24 [ 620.088731] env[62585]: DEBUG nova.scheduler.client.report [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 620.332808] env[62585]: DEBUG nova.network.neutron [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.478798] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f4ef1afc-1486-466b-9bdc-b73b6386ee4b tempest-ServerAddressesNegativeTestJSON-654227640 tempest-ServerAddressesNegativeTestJSON-654227640-project-member] Lock "6f2ca381-b4a3-47ce-b135-dbceb7e44d24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.655s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.598813] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.010s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.599445] env[62585]: ERROR nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d1a6a81d-1664-401f-ae68-915994245700, please check neutron logs for more information. [ 620.599445] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] Traceback (most recent call last): [ 620.599445] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 620.599445] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] self.driver.spawn(context, instance, image_meta, [ 620.599445] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 620.599445] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] self._vmops.spawn(context, instance, image_meta, injected_files, [ 620.599445] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 620.599445] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] vm_ref = self.build_virtual_machine(instance, [ 620.599445] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 620.599445] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] vif_infos = vmwarevif.get_vif_info(self._session, [ 620.599445] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 620.599788] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] for vif in network_info: [ 620.599788] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 620.599788] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] return self._sync_wrapper(fn, *args, **kwargs) [ 620.599788] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 620.599788] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] self.wait() [ 620.599788] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 620.599788] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] self[:] = self._gt.wait() [ 620.599788] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 620.599788] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] return self._exit_event.wait() [ 620.599788] 
env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 620.599788] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] current.throw(*self._exc) [ 620.599788] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 620.599788] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] result = function(*args, **kwargs) [ 620.600128] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 620.600128] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] return func(*args, **kwargs) [ 620.600128] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 620.600128] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] raise e [ 620.600128] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 620.600128] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] nwinfo = self.network_api.allocate_for_instance( [ 620.600128] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 620.600128] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] created_port_ids = self._update_ports_for_instance( [ 620.600128] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 620.600128] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] with excutils.save_and_reraise_exception(): [ 620.600128] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 620.600128] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] self.force_reraise() [ 620.600128] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 620.600446] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] raise self.value [ 620.600446] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 620.600446] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] updated_port = self._update_port( [ 620.600446] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 620.600446] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] 
_ensure_no_port_binding_failure(port) [ 620.600446] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 620.600446] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] raise exception.PortBindingFailed(port_id=port['id']) [ 620.600446] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] nova.exception.PortBindingFailed: Binding failed for port d1a6a81d-1664-401f-ae68-915994245700, please check neutron logs for more information. [ 620.600446] env[62585]: ERROR nova.compute.manager [instance: a8af7330-6454-439c-870b-73d1637b6438] [ 620.600446] env[62585]: DEBUG nova.compute.utils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Binding failed for port d1a6a81d-1664-401f-ae68-915994245700, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 620.603907] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.701s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.605455] env[62585]: INFO nova.compute.claims [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 620.612171] env[62585]: DEBUG nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Build of instance a8af7330-6454-439c-870b-73d1637b6438 was re-scheduled: Binding failed for port d1a6a81d-1664-401f-ae68-915994245700, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 620.612171] env[62585]: DEBUG nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 620.612171] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "refresh_cache-a8af7330-6454-439c-870b-73d1637b6438" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.612171] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock "refresh_cache-a8af7330-6454-439c-870b-73d1637b6438" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.612544] env[62585]: DEBUG nova.network.neutron [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 620.837864] env[62585]: INFO nova.compute.manager [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] [instance: 92080abc-eb47-439b-b702-d226666fa155] Took 1.07 seconds to deallocate network for instance. [ 620.982468] env[62585]: DEBUG nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 621.141794] env[62585]: DEBUG nova.network.neutron [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.429927] env[62585]: DEBUG nova.network.neutron [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.518161] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.872743] env[62585]: INFO nova.scheduler.client.report [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Deleted allocations for instance 92080abc-eb47-439b-b702-d226666fa155 [ 621.937026] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "refresh_cache-a8af7330-6454-439c-870b-73d1637b6438" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.937447] env[62585]: DEBUG nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 621.938083] env[62585]: DEBUG nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 621.938402] env[62585]: DEBUG nova.network.neutron [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 621.986224] env[62585]: DEBUG nova.network.neutron [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.987189] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec68cdf-49c5-46ba-a70d-50bb68040c46 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.004250] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151521c3-3254-4e3f-a3fd-de819d6d091c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.040605] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de54e4e-9003-4261-bf90-81002bb337f2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.048820] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9826d502-729e-4a92-b140-4b533f0f2ac9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.062581] env[62585]: DEBUG nova.compute.provider_tree [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.074208] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Acquiring lock "0049c4a4-dfc2-4968-8ab1-61c344f32e6d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.074208] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Lock "0049c4a4-dfc2-4968-8ab1-61c344f32e6d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.388521] env[62585]: DEBUG oslo_concurrency.lockutils [None req-19925c47-1dfd-4d27-954c-3e3a18a9ba1c tempest-ServerMetadataTestJSON-955963800 tempest-ServerMetadataTestJSON-955963800-project-member] Lock "92080abc-eb47-439b-b702-d226666fa155" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.427s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.492733] env[62585]: DEBUG nova.network.neutron [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.568955] env[62585]: DEBUG nova.scheduler.client.report [None req-b582aff8-01dd-43d5-98c1-479680f34533 
tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 622.892112] env[62585]: DEBUG nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 622.999829] env[62585]: INFO nova.compute.manager [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: a8af7330-6454-439c-870b-73d1637b6438] Took 1.06 seconds to deallocate network for instance. [ 623.078015] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.080032] env[62585]: DEBUG nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 623.081962] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.166s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.083549] env[62585]: INFO nova.compute.claims [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 623.237066] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "20cb5e74-a42c-4c79-aeea-7b8e658bf1d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.237326] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "20cb5e74-a42c-4c79-aeea-7b8e658bf1d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.428080] env[62585]: DEBUG oslo_concurrency.lockutils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.591934] env[62585]: DEBUG nova.compute.utils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 623.596462] env[62585]: DEBUG nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 623.596706] env[62585]: DEBUG nova.network.neutron [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 623.740073] env[62585]: DEBUG nova.policy [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1189cf96a80b4cd4a38a2846668fd5f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7fa8683d5d9046419d5a0a7939ebb08d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 624.046771] env[62585]: INFO nova.scheduler.client.report [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Deleted allocations for instance a8af7330-6454-439c-870b-73d1637b6438 [ 624.097038] env[62585]: DEBUG nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 624.510751] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57196f4-7921-4883-99e1-2ff79b6121d6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.521131] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589e5e8a-b737-4724-86a9-188eec674c82 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.559117] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2afb43d2-1eb9-4bb9-b100-2285077b4179 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.562260] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3a69b0ec-6e44-4aa5-b9aa-d2543b63595b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "a8af7330-6454-439c-870b-73d1637b6438" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.585s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.568642] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2da7eb-d7ab-4a64-9de1-1a4b466a6d62 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.586563] env[62585]: DEBUG nova.compute.provider_tree [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.586563] env[62585]: DEBUG nova.network.neutron [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Successfully created port: b4dcd767-35e9-4212-8e81-d4dac543de77 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 624.867406] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Acquiring lock "2fccf900-e294-4d66-93c5-d1c7570c5d7e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.867951] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Lock "2fccf900-e294-4d66-93c5-d1c7570c5d7e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.067535] env[62585]: DEBUG nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 
tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 625.091074] env[62585]: DEBUG nova.scheduler.client.report [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 625.111010] env[62585]: DEBUG nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 625.144362] env[62585]: DEBUG nova.virt.hardware [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 625.144516] env[62585]: DEBUG nova.virt.hardware [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 625.144566] env[62585]: DEBUG nova.virt.hardware [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 625.144719] env[62585]: DEBUG nova.virt.hardware [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 625.144955] env[62585]: DEBUG nova.virt.hardware [None 
req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 625.145141] env[62585]: DEBUG nova.virt.hardware [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 625.145349] env[62585]: DEBUG nova.virt.hardware [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 625.149018] env[62585]: DEBUG nova.virt.hardware [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 625.149018] env[62585]: DEBUG nova.virt.hardware [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 625.149018] env[62585]: DEBUG nova.virt.hardware [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 625.149018] env[62585]: DEBUG nova.virt.hardware [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 625.149018] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abadb5a-456a-4a1f-9268-7c1204774e04 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.155628] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abbd989e-3526-49a3-b902-a0ecc311d20b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.595479] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.598249] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a 
tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.517s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.600102] env[62585]: DEBUG nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 625.604582] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.712s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.606620] env[62585]: INFO nova.compute.claims [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 626.114531] env[62585]: DEBUG nova.compute.utils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 626.117128] env[62585]: DEBUG nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 626.117128] env[62585]: DEBUG nova.network.neutron [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 626.246736] env[62585]: DEBUG nova.policy [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cf5f26b00a254c1ea56fd857977728b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5cfa39edc29f465ab413917766d9fe65', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 626.622030] env[62585]: DEBUG nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 626.630570] env[62585]: ERROR nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b4dcd767-35e9-4212-8e81-d4dac543de77, please check neutron logs for more information. 
[ 626.630570] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 626.630570] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 626.630570] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 626.630570] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 626.630570] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 626.630570] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 626.630570] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 626.630570] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 626.630570] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 626.630570] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 626.630570] env[62585]: ERROR nova.compute.manager raise self.value [ 626.630570] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 626.630570] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 626.630570] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 626.630570] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 626.631219] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 626.631219] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 626.631219] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b4dcd767-35e9-4212-8e81-d4dac543de77, please check neutron logs for more information. 
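Note on the traceback above: it bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure, which raises PortBindingFailed when Neutron reports a failed binding on the port. Below is a minimal standalone sketch of that check; only the raise and the message text come from the frames and error line above, while the 'binding_failed' sentinel, the class layout, and the port-dict shape are assumptions, not the actual Nova source.

    # Standalone sketch (assumptions noted above) of the check at the bottom of
    # the traceback: a failed Neutron binding is surfaced as PortBindingFailed.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel reported by Neutron

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Neutron records the binding result in 'binding:vif_type'; a failed
        # binding is turned into an exception for the caller to handle.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # The port from the trace above, as it would look after a failed binding:
    try:
        _ensure_no_port_binding_failure(
            {'id': 'b4dcd767-35e9-4212-8e81-d4dac543de77',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)  # Binding failed for port b4dcd767-..., please check neutron logs ...
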
[ 626.631219] env[62585]: ERROR nova.compute.manager [ 626.631219] env[62585]: Traceback (most recent call last): [ 626.631219] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 626.631219] env[62585]: listener.cb(fileno) [ 626.631219] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 626.631219] env[62585]: result = function(*args, **kwargs) [ 626.631219] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 626.631219] env[62585]: return func(*args, **kwargs) [ 626.631219] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 626.631219] env[62585]: raise e [ 626.631219] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 626.631219] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 626.631219] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 626.631219] env[62585]: created_port_ids = self._update_ports_for_instance( [ 626.631219] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 626.631219] env[62585]: with excutils.save_and_reraise_exception(): [ 626.631219] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 626.631219] env[62585]: self.force_reraise() [ 626.631219] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 626.631219] env[62585]: raise self.value [ 626.631219] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 626.631219] env[62585]: updated_port = self._update_port( [ 626.631219] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 626.631219] env[62585]: _ensure_no_port_binding_failure(port) [ 626.631219] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 626.631219] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 626.631941] env[62585]: nova.exception.PortBindingFailed: Binding failed for port b4dcd767-35e9-4212-8e81-d4dac543de77, please check neutron logs for more information. [ 626.631941] env[62585]: Removing descriptor: 17 [ 626.631941] env[62585]: ERROR nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b4dcd767-35e9-4212-8e81-d4dac543de77, please check neutron logs for more information. 
[ 626.631941] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Traceback (most recent call last): [ 626.631941] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 626.631941] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] yield resources [ 626.631941] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 626.631941] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] self.driver.spawn(context, instance, image_meta, [ 626.631941] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 626.631941] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 626.631941] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 626.631941] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] vm_ref = self.build_virtual_machine(instance, [ 626.632241] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 626.632241] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] vif_infos = vmwarevif.get_vif_info(self._session, [ 626.632241] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 626.632241] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] for vif in network_info: [ 626.632241] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 626.632241] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] return self._sync_wrapper(fn, *args, **kwargs) [ 626.632241] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 626.632241] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] self.wait() [ 626.632241] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 626.632241] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] self[:] = self._gt.wait() [ 626.632241] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 626.632241] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] return self._exit_event.wait() [ 626.632241] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 626.632535] env[62585]: ERROR 
nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] result = hub.switch() [ 626.632535] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 626.632535] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] return self.greenlet.switch() [ 626.632535] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 626.632535] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] result = function(*args, **kwargs) [ 626.632535] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 626.632535] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] return func(*args, **kwargs) [ 626.632535] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 626.632535] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] raise e [ 626.632535] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 626.632535] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] nwinfo = self.network_api.allocate_for_instance( [ 626.632535] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 626.632535] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] created_port_ids = self._update_ports_for_instance( [ 626.632855] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 626.632855] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] with excutils.save_and_reraise_exception(): [ 626.632855] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 626.632855] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] self.force_reraise() [ 626.632855] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 626.632855] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] raise self.value [ 626.632855] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 626.632855] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] updated_port = self._update_port( [ 626.632855] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 626.632855] 
env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] _ensure_no_port_binding_failure(port) [ 626.632855] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 626.632855] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] raise exception.PortBindingFailed(port_id=port['id']) [ 626.633195] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] nova.exception.PortBindingFailed: Binding failed for port b4dcd767-35e9-4212-8e81-d4dac543de77, please check neutron logs for more information. [ 626.633195] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] [ 626.633195] env[62585]: INFO nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Terminating instance [ 626.637407] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Acquiring lock "refresh_cache-e883b58a-0fa6-48fd-a8a7-24ead857e6f1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.637586] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Acquired lock "refresh_cache-e883b58a-0fa6-48fd-a8a7-24ead857e6f1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.637755] env[62585]: DEBUG nova.network.neutron [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 626.775601] env[62585]: DEBUG oslo_concurrency.lockutils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "7504c221-2d27-4dc6-9100-9a2dca2a6036" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.776375] env[62585]: DEBUG oslo_concurrency.lockutils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "7504c221-2d27-4dc6-9100-9a2dca2a6036" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.805513] env[62585]: DEBUG nova.network.neutron [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Successfully created port: 4279ede4-346d-4574-b592-11021eb73bce 
{{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 626.850355] env[62585]: DEBUG nova.compute.manager [req-cc65d6e8-1998-4b12-95af-5071e16fae8b req-3cdb53cc-4551-4dbc-9458-1adbc03177c3 service nova] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Received event network-changed-b4dcd767-35e9-4212-8e81-d4dac543de77 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 626.850355] env[62585]: DEBUG nova.compute.manager [req-cc65d6e8-1998-4b12-95af-5071e16fae8b req-3cdb53cc-4551-4dbc-9458-1adbc03177c3 service nova] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Refreshing instance network info cache due to event network-changed-b4dcd767-35e9-4212-8e81-d4dac543de77. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 626.850448] env[62585]: DEBUG oslo_concurrency.lockutils [req-cc65d6e8-1998-4b12-95af-5071e16fae8b req-3cdb53cc-4551-4dbc-9458-1adbc03177c3 service nova] Acquiring lock "refresh_cache-e883b58a-0fa6-48fd-a8a7-24ead857e6f1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.038241] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b67e1e1-3015-43e6-8f87-1b68b90605b5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.049639] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e7b3f8-ca00-4038-b680-9310f134864c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.085011] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61c3aff-56b9-449a-93cf-46b4d38dabd1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.092921] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e6af87-19dd-4327-ae17-06b27261ae5d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.107027] env[62585]: DEBUG nova.compute.provider_tree [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.175839] env[62585]: DEBUG nova.network.neutron [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.338047] env[62585]: DEBUG nova.network.neutron [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.468857] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Acquiring lock "a0512ab3-1248-4f38-8ed9-249ba5a2d488" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.469126] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Lock "a0512ab3-1248-4f38-8ed9-249ba5a2d488" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.609919] env[62585]: DEBUG nova.scheduler.client.report [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 627.640808] env[62585]: DEBUG nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 627.671546] env[62585]: DEBUG nova.virt.hardware [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 627.671738] env[62585]: DEBUG nova.virt.hardware [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 627.671805] env[62585]: DEBUG nova.virt.hardware [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 627.671990] env[62585]: DEBUG nova.virt.hardware [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 627.672184] env[62585]: DEBUG nova.virt.hardware [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 627.672332] env[62585]: DEBUG nova.virt.hardware [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 627.672540] env[62585]: DEBUG nova.virt.hardware [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 627.672699] env[62585]: DEBUG nova.virt.hardware [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 627.672860] env[62585]: DEBUG 
nova.virt.hardware [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 627.673031] env[62585]: DEBUG nova.virt.hardware [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 627.673254] env[62585]: DEBUG nova.virt.hardware [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 627.674077] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7b97ef-48e6-4c51-8543-9b6d7ac22103 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.682641] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ec7a0c-7638-41cb-821d-c14397672bf4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.825347] env[62585]: DEBUG nova.compute.manager [req-04d5be8d-cb8d-401a-a152-ca318294b0f5 req-01ae3be4-185c-4a8e-8c3a-ab9f4c82da7a service nova] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Received event network-changed-4279ede4-346d-4574-b592-11021eb73bce {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 627.825347] env[62585]: DEBUG nova.compute.manager [req-04d5be8d-cb8d-401a-a152-ca318294b0f5 req-01ae3be4-185c-4a8e-8c3a-ab9f4c82da7a service nova] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Refreshing instance network info cache due to event network-changed-4279ede4-346d-4574-b592-11021eb73bce. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 627.825691] env[62585]: DEBUG oslo_concurrency.lockutils [req-04d5be8d-cb8d-401a-a152-ca318294b0f5 req-01ae3be4-185c-4a8e-8c3a-ab9f4c82da7a service nova] Acquiring lock "refresh_cache-5cd813d1-f778-4c8a-920b-64e92a3b52af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.825691] env[62585]: DEBUG oslo_concurrency.lockutils [req-04d5be8d-cb8d-401a-a152-ca318294b0f5 req-01ae3be4-185c-4a8e-8c3a-ab9f4c82da7a service nova] Acquired lock "refresh_cache-5cd813d1-f778-4c8a-920b-64e92a3b52af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.825809] env[62585]: DEBUG nova.network.neutron [req-04d5be8d-cb8d-401a-a152-ca318294b0f5 req-01ae3be4-185c-4a8e-8c3a-ab9f4c82da7a service nova] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Refreshing network info cache for port 4279ede4-346d-4574-b592-11021eb73bce {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 627.842538] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Releasing lock "refresh_cache-e883b58a-0fa6-48fd-a8a7-24ead857e6f1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.843199] env[62585]: DEBUG nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 627.843457] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 627.843814] env[62585]: DEBUG oslo_concurrency.lockutils [req-cc65d6e8-1998-4b12-95af-5071e16fae8b req-3cdb53cc-4551-4dbc-9458-1adbc03177c3 service nova] Acquired lock "refresh_cache-e883b58a-0fa6-48fd-a8a7-24ead857e6f1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.844039] env[62585]: DEBUG nova.network.neutron [req-cc65d6e8-1998-4b12-95af-5071e16fae8b req-3cdb53cc-4551-4dbc-9458-1adbc03177c3 service nova] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Refreshing network info cache for port b4dcd767-35e9-4212-8e81-d4dac543de77 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 627.846710] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1dc1fad-3b2c-45dc-9ec2-2692b4a1c964 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.857531] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55cbf927-dce0-46e5-89bc-3b6257900218 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.887928] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e883b58a-0fa6-48fd-a8a7-24ead857e6f1 could not be found. [ 627.888115] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 627.888115] env[62585]: INFO nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 627.888363] env[62585]: DEBUG oslo.service.loopingcall [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 627.888585] env[62585]: DEBUG nova.compute.manager [-] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 627.888681] env[62585]: DEBUG nova.network.neutron [-] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 627.905554] env[62585]: DEBUG nova.network.neutron [-] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 628.050715] env[62585]: ERROR nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4279ede4-346d-4574-b592-11021eb73bce, please check neutron logs for more information. [ 628.050715] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 628.050715] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.050715] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 628.050715] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 628.050715] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 628.050715] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 628.050715] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 628.050715] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.050715] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 628.050715] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.050715] env[62585]: ERROR nova.compute.manager raise self.value [ 628.050715] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 628.050715] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 628.050715] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.050715] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 628.051200] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 628.051200] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 628.051200] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4279ede4-346d-4574-b592-11021eb73bce, please check neutron logs for more information. 
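Note on the traceback above (and the matching one for port b4dcd767 earlier): the allocation runs in a separate eventlet greenthread via _allocate_network_async, so the PortBindingFailed does not stop the build immediately; it only surfaces when the spawn path iterates network_info and waits on that greenthread (model.py _sync_wrapper -> wait -> greenthread.wait in the frames). A minimal, self-contained sketch of that deferred-failure behaviour, using plain eventlet with illustrative names rather than Nova's own wrappers:

    # Sketch of the deferred failure seen in these tracebacks; names are
    # illustrative, not Nova's.
    import eventlet

    class PortBindingFailed(Exception):
        pass

    def allocate_network_async(port_id):
        # Stand-in for the background network allocation that fails in Neutron.
        raise PortBindingFailed(f"Binding failed for port {port_id}")

    gt = eventlet.spawn(allocate_network_async,
                        '4279ede4-346d-4574-b592-11021eb73bce')

    # Nothing is raised yet: the exception stays inside the greenthread until
    # the spawning code needs the network info and waits on the result.
    try:
        gt.wait()
    except PortBindingFailed as exc:
        print(f"surfaces only at wait(): {exc}")
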
[ 628.051200] env[62585]: ERROR nova.compute.manager [ 628.051200] env[62585]: Traceback (most recent call last): [ 628.051200] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 628.051200] env[62585]: listener.cb(fileno) [ 628.051200] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 628.051200] env[62585]: result = function(*args, **kwargs) [ 628.051200] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 628.051200] env[62585]: return func(*args, **kwargs) [ 628.051200] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 628.051200] env[62585]: raise e [ 628.051200] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.051200] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 628.051200] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 628.051200] env[62585]: created_port_ids = self._update_ports_for_instance( [ 628.051200] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 628.051200] env[62585]: with excutils.save_and_reraise_exception(): [ 628.051200] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.051200] env[62585]: self.force_reraise() [ 628.051200] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.051200] env[62585]: raise self.value [ 628.051200] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 628.051200] env[62585]: updated_port = self._update_port( [ 628.051200] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.051200] env[62585]: _ensure_no_port_binding_failure(port) [ 628.051200] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 628.051200] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 628.051900] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 4279ede4-346d-4574-b592-11021eb73bce, please check neutron logs for more information. [ 628.051900] env[62585]: Removing descriptor: 15 [ 628.051900] env[62585]: ERROR nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4279ede4-346d-4574-b592-11021eb73bce, please check neutron logs for more information. 
[ 628.051900] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Traceback (most recent call last): [ 628.051900] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 628.051900] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] yield resources [ 628.051900] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 628.051900] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] self.driver.spawn(context, instance, image_meta, [ 628.051900] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 628.051900] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] self._vmops.spawn(context, instance, image_meta, injected_files, [ 628.051900] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 628.051900] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] vm_ref = self.build_virtual_machine(instance, [ 628.052230] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 628.052230] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] vif_infos = vmwarevif.get_vif_info(self._session, [ 628.052230] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 628.052230] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] for vif in network_info: [ 628.052230] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 628.052230] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] return self._sync_wrapper(fn, *args, **kwargs) [ 628.052230] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 628.052230] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] self.wait() [ 628.052230] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 628.052230] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] self[:] = self._gt.wait() [ 628.052230] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 628.052230] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] return self._exit_event.wait() [ 628.052230] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 628.052549] env[62585]: ERROR 
nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] result = hub.switch() [ 628.052549] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 628.052549] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] return self.greenlet.switch() [ 628.052549] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 628.052549] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] result = function(*args, **kwargs) [ 628.052549] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 628.052549] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] return func(*args, **kwargs) [ 628.052549] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 628.052549] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] raise e [ 628.052549] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.052549] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] nwinfo = self.network_api.allocate_for_instance( [ 628.052549] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 628.052549] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] created_port_ids = self._update_ports_for_instance( [ 628.052887] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 628.052887] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] with excutils.save_and_reraise_exception(): [ 628.052887] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.052887] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] self.force_reraise() [ 628.052887] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.052887] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] raise self.value [ 628.052887] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 628.052887] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] updated_port = self._update_port( [ 628.052887] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.052887] 
env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] _ensure_no_port_binding_failure(port) [ 628.052887] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 628.052887] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] raise exception.PortBindingFailed(port_id=port['id']) [ 628.053221] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] nova.exception.PortBindingFailed: Binding failed for port 4279ede4-346d-4574-b592-11021eb73bce, please check neutron logs for more information. [ 628.053221] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] [ 628.053221] env[62585]: INFO nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Terminating instance [ 628.059266] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Acquiring lock "refresh_cache-5cd813d1-f778-4c8a-920b-64e92a3b52af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.115844] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.512s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.116268] env[62585]: DEBUG nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 628.122895] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.108s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.124465] env[62585]: INFO nova.compute.claims [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.345588] env[62585]: DEBUG nova.network.neutron [req-04d5be8d-cb8d-401a-a152-ca318294b0f5 req-01ae3be4-185c-4a8e-8c3a-ab9f4c82da7a service nova] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 628.373705] env[62585]: DEBUG nova.network.neutron [req-cc65d6e8-1998-4b12-95af-5071e16fae8b req-3cdb53cc-4551-4dbc-9458-1adbc03177c3 service nova] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 628.414811] env[62585]: DEBUG nova.network.neutron [-] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.488029] env[62585]: DEBUG nova.network.neutron [req-cc65d6e8-1998-4b12-95af-5071e16fae8b req-3cdb53cc-4551-4dbc-9458-1adbc03177c3 service nova] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.493856] env[62585]: DEBUG nova.network.neutron [req-04d5be8d-cb8d-401a-a152-ca318294b0f5 req-01ae3be4-185c-4a8e-8c3a-ab9f4c82da7a service nova] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.630314] env[62585]: DEBUG nova.compute.utils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 628.635429] env[62585]: DEBUG nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 628.635429] env[62585]: DEBUG nova.network.neutron [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 628.683871] env[62585]: DEBUG nova.policy [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd184c65cfd184ea6a36065c89e9758de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd4560ed50c354e699739ca4dbd45ab9b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 628.897274] env[62585]: DEBUG nova.compute.manager [req-1ddce703-ed16-49f8-af11-28479b9fbbf5 req-22fb7b0e-a1bb-48b8-85ff-14d2ab5d9e7e service nova] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Received event network-vif-deleted-b4dcd767-35e9-4212-8e81-d4dac543de77 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 628.919955] env[62585]: INFO nova.compute.manager [-] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Took 1.03 seconds to deallocate network for instance. [ 628.922560] env[62585]: DEBUG nova.compute.claims [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 628.922740] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.949561] env[62585]: DEBUG nova.network.neutron [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Successfully created port: 3940fa4a-2718-4628-ad67-601bc1ecbabc {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 628.966174] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Acquiring lock "d536e668-d597-4f8e-8d61-974e072b48c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.966399] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Lock 
"d536e668-d597-4f8e-8d61-974e072b48c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.997178] env[62585]: DEBUG oslo_concurrency.lockutils [req-cc65d6e8-1998-4b12-95af-5071e16fae8b req-3cdb53cc-4551-4dbc-9458-1adbc03177c3 service nova] Releasing lock "refresh_cache-e883b58a-0fa6-48fd-a8a7-24ead857e6f1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.997663] env[62585]: DEBUG oslo_concurrency.lockutils [req-04d5be8d-cb8d-401a-a152-ca318294b0f5 req-01ae3be4-185c-4a8e-8c3a-ab9f4c82da7a service nova] Releasing lock "refresh_cache-5cd813d1-f778-4c8a-920b-64e92a3b52af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.998344] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Acquired lock "refresh_cache-5cd813d1-f778-4c8a-920b-64e92a3b52af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.998599] env[62585]: DEBUG nova.network.neutron [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 629.136060] env[62585]: DEBUG nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 629.513691] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e91a6c08-bfa3-4c26-aca5-930ef8fd8520 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.520398] env[62585]: DEBUG nova.network.neutron [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.524741] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fda8045d-c9b0-4fb9-b797-816279e7023e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.558026] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c276ddfa-847c-4979-b75a-f195aa6fac9a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.564253] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-720b8265-f350-44fb-8cf5-0a599fa19794 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.577811] env[62585]: DEBUG nova.compute.provider_tree [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 629.647753] env[62585]: DEBUG nova.network.neutron [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.648889] env[62585]: INFO nova.virt.block_device [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Booting with volume e807607d-7a5e-40f0-a2c2-47556cc4a8e8 at /dev/sda [ 629.700446] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b77bafb-0153-4717-bd4c-0fc591083551 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.709442] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5d50a0-7085-496c-9ccc-94177c41c8cb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.738185] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d7206eb-42ff-4b3d-a972-1a517fa66251 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.746489] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a77e3cd-4a45-485a-ae97-87d9f7832ab3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.771949] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35cfe4a3-2fe6-49d5-9ef2-9264db397e06 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.778556] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499455ee-e541-4e96-b4e9-04af29c9f746 {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.791941] env[62585]: DEBUG nova.virt.block_device [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Updating existing volume attachment record: 5efa5071-a035-4e7f-98c4-89971e91e960 {{(pid=62585) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 630.080670] env[62585]: DEBUG nova.scheduler.client.report [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 630.131488] env[62585]: ERROR nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3940fa4a-2718-4628-ad67-601bc1ecbabc, please check neutron logs for more information. [ 630.131488] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 630.131488] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 630.131488] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 630.131488] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 630.131488] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 630.131488] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 630.131488] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 630.131488] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 630.131488] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 630.131488] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 630.131488] env[62585]: ERROR nova.compute.manager raise self.value [ 630.131488] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 630.131488] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 630.131488] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 630.131488] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 630.131903] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 630.131903] env[62585]: ERROR 
nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 630.131903] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3940fa4a-2718-4628-ad67-601bc1ecbabc, please check neutron logs for more information. [ 630.131903] env[62585]: ERROR nova.compute.manager [ 630.131903] env[62585]: Traceback (most recent call last): [ 630.131903] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 630.131903] env[62585]: listener.cb(fileno) [ 630.131903] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 630.131903] env[62585]: result = function(*args, **kwargs) [ 630.131903] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 630.131903] env[62585]: return func(*args, **kwargs) [ 630.131903] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 630.131903] env[62585]: raise e [ 630.131903] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 630.131903] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 630.131903] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 630.131903] env[62585]: created_port_ids = self._update_ports_for_instance( [ 630.131903] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 630.131903] env[62585]: with excutils.save_and_reraise_exception(): [ 630.131903] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 630.131903] env[62585]: self.force_reraise() [ 630.131903] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 630.131903] env[62585]: raise self.value [ 630.131903] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 630.131903] env[62585]: updated_port = self._update_port( [ 630.131903] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 630.131903] env[62585]: _ensure_no_port_binding_failure(port) [ 630.131903] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 630.131903] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 630.132825] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 3940fa4a-2718-4628-ad67-601bc1ecbabc, please check neutron logs for more information. [ 630.132825] env[62585]: Removing descriptor: 15 [ 630.152039] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Releasing lock "refresh_cache-5cd813d1-f778-4c8a-920b-64e92a3b52af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.152403] env[62585]: DEBUG nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 630.152659] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 630.152980] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ad0f30df-8267-4256-abf1-c292787c83a3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.162959] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e193e6ef-e2bb-4b25-a195-0330d39d5458 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.175059] env[62585]: DEBUG nova.compute.manager [req-522bd72d-aa00-4173-aa39-de1ec49ed6a5 req-574c5429-1191-4fbc-b347-3a7c76a96ca4 service nova] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Received event network-vif-deleted-4279ede4-346d-4574-b592-11021eb73bce {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 630.175217] env[62585]: DEBUG nova.compute.manager [req-522bd72d-aa00-4173-aa39-de1ec49ed6a5 req-574c5429-1191-4fbc-b347-3a7c76a96ca4 service nova] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Received event network-changed-3940fa4a-2718-4628-ad67-601bc1ecbabc {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 630.175408] env[62585]: DEBUG nova.compute.manager [req-522bd72d-aa00-4173-aa39-de1ec49ed6a5 req-574c5429-1191-4fbc-b347-3a7c76a96ca4 service nova] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Refreshing instance network info cache due to event network-changed-3940fa4a-2718-4628-ad67-601bc1ecbabc. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 630.175613] env[62585]: DEBUG oslo_concurrency.lockutils [req-522bd72d-aa00-4173-aa39-de1ec49ed6a5 req-574c5429-1191-4fbc-b347-3a7c76a96ca4 service nova] Acquiring lock "refresh_cache-c080105d-4a58-4616-b65c-7bac79dd93c1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.175748] env[62585]: DEBUG oslo_concurrency.lockutils [req-522bd72d-aa00-4173-aa39-de1ec49ed6a5 req-574c5429-1191-4fbc-b347-3a7c76a96ca4 service nova] Acquired lock "refresh_cache-c080105d-4a58-4616-b65c-7bac79dd93c1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.176215] env[62585]: DEBUG nova.network.neutron [req-522bd72d-aa00-4173-aa39-de1ec49ed6a5 req-574c5429-1191-4fbc-b347-3a7c76a96ca4 service nova] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Refreshing network info cache for port 3940fa4a-2718-4628-ad67-601bc1ecbabc {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 630.190126] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5cd813d1-f778-4c8a-920b-64e92a3b52af could not be found. 
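The inventory reported for provider 66db9ec1-b5c3-45d2-a885-8e338110656b above (timestamp 630.080670) fixes how much capacity placement will offer for this node: per resource class, usable capacity is (total - reserved) * allocation_ratio, and max_unit caps what any single allocation may claim. Below is a minimal sketch of that arithmetic using the logged figures; the helper name and output format are illustrative only, not Nova or placement code.

# Back-of-the-envelope check of the inventory logged above. Assumes the
# standard placement capacity rule (total - reserved) * allocation_ratio;
# the helper itself is hypothetical, not taken from the Nova source.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 176,   'allocation_ratio': 1.0},
}

def usable_capacity(inv):
    """Capacity placement would expose per resource class."""
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(usable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
# i.e. 48 physical vCPUs oversubscribed 4x, while max_unit still limits a
# single instance to 16 VCPU / 65530 MB / 176 GB.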
[ 630.190390] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 630.190537] env[62585]: INFO nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Took 0.04 seconds to destroy the instance on the hypervisor. [ 630.190775] env[62585]: DEBUG oslo.service.loopingcall [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 630.191012] env[62585]: DEBUG nova.compute.manager [-] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 630.191109] env[62585]: DEBUG nova.network.neutron [-] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 630.211584] env[62585]: DEBUG nova.network.neutron [-] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 630.589419] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.466s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.589949] env[62585]: DEBUG nova.compute.manager [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 630.592691] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 21.459s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.592839] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.592990] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62585) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 630.593369] env[62585]: DEBUG oslo_concurrency.lockutils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.392s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.596799] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111d537a-4cf9-4bbc-b81e-337617b842dc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.606127] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8d1235-81c9-49d6-8e5c-d13d9b92f58b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.622299] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d7b3a1-f209-4c97-961e-7a83eadf25f6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.629506] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1daaf471-476d-45d4-ac9a-f155d08c8e9c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.660149] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181291MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=62585) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 630.661036] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.674891] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] 
Acquiring lock "f03bdd4b-e75e-4d70-84b3-126d2296994f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.674891] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Lock "f03bdd4b-e75e-4d70-84b3-126d2296994f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.693678] env[62585]: DEBUG nova.network.neutron [req-522bd72d-aa00-4173-aa39-de1ec49ed6a5 req-574c5429-1191-4fbc-b347-3a7c76a96ca4 service nova] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 630.716041] env[62585]: DEBUG nova.network.neutron [-] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.773517] env[62585]: DEBUG nova.network.neutron [req-522bd72d-aa00-4173-aa39-de1ec49ed6a5 req-574c5429-1191-4fbc-b347-3a7c76a96ca4 service nova] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.098541] env[62585]: DEBUG nova.compute.utils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 631.100131] env[62585]: DEBUG nova.compute.manager [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 631.100313] env[62585]: DEBUG nova.network.neutron [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 631.154373] env[62585]: DEBUG nova.policy [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a998788d0caa44138eb389125504e2cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81a3b6b8688c4376bc3d8e463d03e0bb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 631.218836] env[62585]: INFO nova.compute.manager [-] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Took 1.03 seconds to deallocate network for instance. [ 631.223773] env[62585]: DEBUG nova.compute.claims [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 631.223976] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.275995] env[62585]: DEBUG oslo_concurrency.lockutils [req-522bd72d-aa00-4173-aa39-de1ec49ed6a5 req-574c5429-1191-4fbc-b347-3a7c76a96ca4 service nova] Releasing lock "refresh_cache-c080105d-4a58-4616-b65c-7bac79dd93c1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.430180] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d267d87e-afb9-4111-bec7-efdc1a9ec3a3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.440831] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca84f82-ecd8-4c3c-bb18-50c0671bb836 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.475181] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2eed88c-4596-49a0-a821-9bfd98477336 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.486950] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5b35f2-dd66-4a1a-be13-9eda59314dfc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.500234] env[62585]: DEBUG 
nova.compute.provider_tree [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.537059] env[62585]: DEBUG nova.network.neutron [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Successfully created port: c69c9a27-7311-4a48-a33e-d1b8f1a78b15 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 631.605018] env[62585]: DEBUG nova.compute.manager [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 631.900158] env[62585]: DEBUG nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 631.900695] env[62585]: DEBUG nova.virt.hardware [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 631.900908] env[62585]: DEBUG nova.virt.hardware [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 631.901067] env[62585]: DEBUG nova.virt.hardware [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 631.901243] env[62585]: DEBUG nova.virt.hardware [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 631.901530] env[62585]: DEBUG nova.virt.hardware [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Image 
pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 631.901530] env[62585]: DEBUG nova.virt.hardware [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 631.901704] env[62585]: DEBUG nova.virt.hardware [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 631.901862] env[62585]: DEBUG nova.virt.hardware [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 631.902031] env[62585]: DEBUG nova.virt.hardware [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 631.902187] env[62585]: DEBUG nova.virt.hardware [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 631.902349] env[62585]: DEBUG nova.virt.hardware [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 631.903202] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b885ba3f-cb7c-4711-bb95-cf49d5cb9214 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.911210] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7a36f4-acde-4da8-9617-448799f52783 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.926259] env[62585]: ERROR nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3940fa4a-2718-4628-ad67-601bc1ecbabc, please check neutron logs for more information. 
[ 631.926259] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Traceback (most recent call last): [ 631.926259] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 631.926259] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] yield resources [ 631.926259] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 631.926259] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] self.driver.spawn(context, instance, image_meta, [ 631.926259] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 631.926259] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 631.926259] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 631.926259] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] vm_ref = self.build_virtual_machine(instance, [ 631.926259] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 631.926602] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] vif_infos = vmwarevif.get_vif_info(self._session, [ 631.926602] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 631.926602] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] for vif in network_info: [ 631.926602] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 631.926602] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] return self._sync_wrapper(fn, *args, **kwargs) [ 631.926602] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 631.926602] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] self.wait() [ 631.926602] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 631.926602] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] self[:] = self._gt.wait() [ 631.926602] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 631.926602] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] return self._exit_event.wait() [ 631.926602] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 631.926602] env[62585]: ERROR 
nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] current.throw(*self._exc) [ 631.927248] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 631.927248] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] result = function(*args, **kwargs) [ 631.927248] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 631.927248] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] return func(*args, **kwargs) [ 631.927248] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 631.927248] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] raise e [ 631.927248] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 631.927248] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] nwinfo = self.network_api.allocate_for_instance( [ 631.927248] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 631.927248] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] created_port_ids = self._update_ports_for_instance( [ 631.927248] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 631.927248] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] with excutils.save_and_reraise_exception(): [ 631.927248] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 631.927806] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] self.force_reraise() [ 631.927806] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 631.927806] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] raise self.value [ 631.927806] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 631.927806] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] updated_port = self._update_port( [ 631.927806] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 631.927806] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] _ensure_no_port_binding_failure(port) [ 631.927806] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
631.927806] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] raise exception.PortBindingFailed(port_id=port['id']) [ 631.927806] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] nova.exception.PortBindingFailed: Binding failed for port 3940fa4a-2718-4628-ad67-601bc1ecbabc, please check neutron logs for more information. [ 631.927806] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] [ 631.927806] env[62585]: INFO nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Terminating instance [ 631.929358] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Acquiring lock "refresh_cache-c080105d-4a58-4616-b65c-7bac79dd93c1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.929480] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Acquired lock "refresh_cache-c080105d-4a58-4616-b65c-7bac79dd93c1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.929663] env[62585]: DEBUG nova.network.neutron [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 632.003234] env[62585]: DEBUG nova.scheduler.client.report [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 632.213426] env[62585]: DEBUG nova.compute.manager [req-5b2efe37-c5f9-4b9b-b51e-4d6bd97fdb1e req-ffb72d1c-667f-46ed-9813-096e0980a51e service nova] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Received event network-vif-deleted-3940fa4a-2718-4628-ad67-601bc1ecbabc {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 632.456069] env[62585]: DEBUG nova.network.neutron [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 632.509362] env[62585]: DEBUG oslo_concurrency.lockutils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.916s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.509976] env[62585]: ERROR nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 36005db7-d1cf-4fde-9210-7ed3242f300a, please check neutron logs for more information. [ 632.509976] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Traceback (most recent call last): [ 632.509976] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 632.509976] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] self.driver.spawn(context, instance, image_meta, [ 632.509976] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 632.509976] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 632.509976] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 632.509976] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] vm_ref = self.build_virtual_machine(instance, [ 632.509976] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 632.509976] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] vif_infos = vmwarevif.get_vif_info(self._session, [ 632.509976] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 632.510341] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] for vif in network_info: [ 632.510341] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 632.510341] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] return self._sync_wrapper(fn, *args, **kwargs) [ 632.510341] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 632.510341] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] self.wait() [ 632.510341] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 632.510341] env[62585]: 
ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] self[:] = self._gt.wait() [ 632.510341] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 632.510341] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] return self._exit_event.wait() [ 632.510341] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 632.510341] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] result = hub.switch() [ 632.510341] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 632.510341] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] return self.greenlet.switch() [ 632.510755] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 632.510755] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] result = function(*args, **kwargs) [ 632.510755] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 632.510755] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] return func(*args, **kwargs) [ 632.510755] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 632.510755] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] raise e [ 632.510755] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 632.510755] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] nwinfo = self.network_api.allocate_for_instance( [ 632.510755] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 632.510755] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] created_port_ids = self._update_ports_for_instance( [ 632.510755] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 632.510755] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] with excutils.save_and_reraise_exception(): [ 632.510755] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.511119] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] self.force_reraise() [ 632.511119] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", 
line 200, in force_reraise [ 632.511119] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] raise self.value [ 632.511119] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 632.511119] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] updated_port = self._update_port( [ 632.511119] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.511119] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] _ensure_no_port_binding_failure(port) [ 632.511119] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 632.511119] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] raise exception.PortBindingFailed(port_id=port['id']) [ 632.511119] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] nova.exception.PortBindingFailed: Binding failed for port 36005db7-d1cf-4fde-9210-7ed3242f300a, please check neutron logs for more information. [ 632.511119] env[62585]: ERROR nova.compute.manager [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] [ 632.511483] env[62585]: DEBUG nova.compute.utils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Binding failed for port 36005db7-d1cf-4fde-9210-7ed3242f300a, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 632.512333] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.979s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.513247] env[62585]: INFO nova.compute.claims [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 632.516998] env[62585]: ERROR nova.compute.manager [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c69c9a27-7311-4a48-a33e-d1b8f1a78b15, please check neutron logs for more information. 
[ 632.516998] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 632.516998] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 632.516998] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 632.516998] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 632.516998] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 632.516998] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 632.516998] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 632.516998] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.516998] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 632.516998] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.516998] env[62585]: ERROR nova.compute.manager raise self.value [ 632.516998] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 632.516998] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 632.516998] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.516998] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 632.517869] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 632.517869] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 632.517869] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c69c9a27-7311-4a48-a33e-d1b8f1a78b15, please check neutron logs for more information. 
[ 632.517869] env[62585]: ERROR nova.compute.manager [ 632.517869] env[62585]: Traceback (most recent call last): [ 632.517869] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 632.517869] env[62585]: listener.cb(fileno) [ 632.517869] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 632.517869] env[62585]: result = function(*args, **kwargs) [ 632.517869] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 632.517869] env[62585]: return func(*args, **kwargs) [ 632.517869] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 632.517869] env[62585]: raise e [ 632.517869] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 632.517869] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 632.517869] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 632.517869] env[62585]: created_port_ids = self._update_ports_for_instance( [ 632.517869] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 632.517869] env[62585]: with excutils.save_and_reraise_exception(): [ 632.517869] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.517869] env[62585]: self.force_reraise() [ 632.517869] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.517869] env[62585]: raise self.value [ 632.517869] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 632.517869] env[62585]: updated_port = self._update_port( [ 632.517869] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.517869] env[62585]: _ensure_no_port_binding_failure(port) [ 632.517869] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 632.517869] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 632.519029] env[62585]: nova.exception.PortBindingFailed: Binding failed for port c69c9a27-7311-4a48-a33e-d1b8f1a78b15, please check neutron logs for more information. [ 632.519029] env[62585]: Removing descriptor: 15 [ 632.519029] env[62585]: DEBUG nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Build of instance 29f9e25a-a0b2-4bb8-b59a-3617819d3be5 was re-scheduled: Binding failed for port 36005db7-d1cf-4fde-9210-7ed3242f300a, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 632.519029] env[62585]: DEBUG nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 632.519029] env[62585]: DEBUG oslo_concurrency.lockutils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Acquiring lock "refresh_cache-29f9e25a-a0b2-4bb8-b59a-3617819d3be5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.519029] env[62585]: DEBUG oslo_concurrency.lockutils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Acquired lock "refresh_cache-29f9e25a-a0b2-4bb8-b59a-3617819d3be5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.519302] env[62585]: DEBUG nova.network.neutron [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 632.551291] env[62585]: DEBUG nova.network.neutron [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.614183] env[62585]: DEBUG nova.compute.manager [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 632.640349] env[62585]: DEBUG nova.virt.hardware [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 632.640594] env[62585]: DEBUG nova.virt.hardware [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 632.640750] env[62585]: DEBUG nova.virt.hardware [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 632.640928] env[62585]: DEBUG nova.virt.hardware [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 632.641095] env[62585]: DEBUG nova.virt.hardware [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 632.641231] env[62585]: DEBUG nova.virt.hardware [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 632.641435] env[62585]: DEBUG nova.virt.hardware [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 632.641592] env[62585]: DEBUG nova.virt.hardware [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 632.641755] env[62585]: DEBUG 
nova.virt.hardware [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 632.641989] env[62585]: DEBUG nova.virt.hardware [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 632.642197] env[62585]: DEBUG nova.virt.hardware [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 632.643046] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88be5a1-972f-4678-a631-0f4c62046861 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.651817] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ad43be-fdca-4880-ad1a-a1d6c24d7c63 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.664911] env[62585]: ERROR nova.compute.manager [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c69c9a27-7311-4a48-a33e-d1b8f1a78b15, please check neutron logs for more information. 
[ 632.664911] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Traceback (most recent call last): [ 632.664911] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 632.664911] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] yield resources [ 632.664911] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 632.664911] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] self.driver.spawn(context, instance, image_meta, [ 632.664911] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 632.664911] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 632.664911] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 632.664911] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] vm_ref = self.build_virtual_machine(instance, [ 632.664911] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 632.665276] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] vif_infos = vmwarevif.get_vif_info(self._session, [ 632.665276] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 632.665276] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] for vif in network_info: [ 632.665276] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 632.665276] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] return self._sync_wrapper(fn, *args, **kwargs) [ 632.665276] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 632.665276] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] self.wait() [ 632.665276] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 632.665276] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] self[:] = self._gt.wait() [ 632.665276] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 632.665276] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] return self._exit_event.wait() [ 632.665276] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 632.665276] env[62585]: ERROR 
nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] current.throw(*self._exc) [ 632.665662] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 632.665662] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] result = function(*args, **kwargs) [ 632.665662] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 632.665662] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] return func(*args, **kwargs) [ 632.665662] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 632.665662] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] raise e [ 632.665662] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 632.665662] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] nwinfo = self.network_api.allocate_for_instance( [ 632.665662] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 632.665662] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] created_port_ids = self._update_ports_for_instance( [ 632.665662] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 632.665662] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] with excutils.save_and_reraise_exception(): [ 632.665662] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.666109] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] self.force_reraise() [ 632.666109] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.666109] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] raise self.value [ 632.666109] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 632.666109] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] updated_port = self._update_port( [ 632.666109] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.666109] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] _ensure_no_port_binding_failure(port) [ 632.666109] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
632.666109] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] raise exception.PortBindingFailed(port_id=port['id']) [ 632.666109] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] nova.exception.PortBindingFailed: Binding failed for port c69c9a27-7311-4a48-a33e-d1b8f1a78b15, please check neutron logs for more information. [ 632.666109] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] [ 632.666109] env[62585]: INFO nova.compute.manager [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Terminating instance [ 632.667336] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Acquiring lock "refresh_cache-14557f1a-2410-4201-9b91-49d23f18d47a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.667491] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Acquired lock "refresh_cache-14557f1a-2410-4201-9b91-49d23f18d47a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.667654] env[62585]: DEBUG nova.network.neutron [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 633.038699] env[62585]: DEBUG nova.network.neutron [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.053968] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Releasing lock "refresh_cache-c080105d-4a58-4616-b65c-7bac79dd93c1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.054555] env[62585]: DEBUG nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 633.054820] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-424c6718-6a59-4412-8fad-3a1bd77497f7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.063893] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530cb0e6-0e36-4ac6-a671-2f8e70318e09 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.086507] env[62585]: WARNING nova.virt.vmwareapi.driver [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance c080105d-4a58-4616-b65c-7bac79dd93c1 could not be found. [ 633.086717] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 633.087555] env[62585]: DEBUG nova.network.neutron [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.089059] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4b3349af-9e1f-43be-945b-d61339e6c047 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.096564] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad02e9f-a565-4c86-988e-f59640b0bc0b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.117868] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c080105d-4a58-4616-b65c-7bac79dd93c1 could not be found. [ 633.118079] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 633.118267] env[62585]: INFO nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Took 0.06 seconds to destroy the instance on the hypervisor. 
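The PortBindingFailed traceback above bottoms out in nova.network.neutron._ensure_no_port_binding_failure. As a rough illustration of what that check amounts to (a simplified sketch, not the actual Nova source), the snippet below assumes a Neutron port dict carrying the standard 'binding:vif_type' field and raises the same style of exception when Neutron reports the binding as failed:

    # Simplified, hypothetical reconstruction of the port-binding sanity check
    # seen in the traceback above; field names follow the Neutron port API.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'


    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)


    def ensure_no_port_binding_failure(port):
        """Raise PortBindingFailed if Neutron reports the binding as failed."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])


    # Example: a port whose binding failed on the compute host.
    try:
        ensure_no_port_binding_failure(
            {'id': 'c69c9a27-7311-4a48-a33e-d1b8f1a78b15',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)

In the run above, port c69c9a27-7311-4a48-a33e-d1b8f1a78b15 trips this check inside _update_port, which is why the build is aborted and the instance proceeds to termination and network deallocation.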
[ 633.118493] env[62585]: DEBUG oslo.service.loopingcall [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 633.118699] env[62585]: DEBUG nova.compute.manager [-] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 633.118793] env[62585]: DEBUG nova.network.neutron [-] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 633.132717] env[62585]: DEBUG nova.network.neutron [-] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.187434] env[62585]: DEBUG nova.network.neutron [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.279367] env[62585]: DEBUG nova.network.neutron [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.590276] env[62585]: DEBUG oslo_concurrency.lockutils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Releasing lock "refresh_cache-29f9e25a-a0b2-4bb8-b59a-3617819d3be5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.590490] env[62585]: DEBUG nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 633.590652] env[62585]: DEBUG nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 633.593059] env[62585]: DEBUG nova.network.neutron [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 633.610061] env[62585]: DEBUG nova.network.neutron [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.634782] env[62585]: DEBUG nova.network.neutron [-] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.783225] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Releasing lock "refresh_cache-14557f1a-2410-4201-9b91-49d23f18d47a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.783630] env[62585]: DEBUG nova.compute.manager [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 633.783871] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 633.784475] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-10e0a48b-c3ff-473c-b35a-fbda9c813ea3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.793311] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0293c204-37e9-4737-ae3c-f8b2be47cd1b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.816969] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 14557f1a-2410-4201-9b91-49d23f18d47a could not be found. 
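The recurring "Acquiring lock ... / Acquired lock ... / Releasing lock ..." lines throughout this section (for example the refresh_cache-<uuid> locks above) are emitted by oslo.concurrency's lockutils helpers. A minimal sketch of that usage pattern, with hypothetical lock names standing in for the real per-instance cache locks:

    from oslo_concurrency import lockutils

    # Decorator form: the function body runs with the named lock held.
    @lockutils.synchronized('refresh_cache-00000000-example-uuid')
    def refresh_network_info_cache():
        # The "Acquiring lock ... waited N s" and "released ... held N s"
        # DEBUG lines in the log are emitted by lockutils around exactly
        # this kind of critical section.
        return []

    # Context-manager form, as used around the refresh_cache sections above.
    with lockutils.lock('build-example-lock'):
        cached_info = refresh_network_info_cache()

The "waited N s" / "held N s" figures in the log are measured by the same helper around lock acquisition and release.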
[ 633.817219] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 633.817399] env[62585]: INFO nova.compute.manager [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 633.817635] env[62585]: DEBUG oslo.service.loopingcall [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 633.819947] env[62585]: DEBUG nova.compute.manager [-] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 633.820066] env[62585]: DEBUG nova.network.neutron [-] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 633.840447] env[62585]: DEBUG nova.network.neutron [-] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.853393] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09184025-6c33-4626-8edd-07394b627179 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.861093] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18308d0-601b-4b1e-8af6-42bcbcf56388 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.890182] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406e3176-7337-4538-85a6-70d6cfda40ed {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.897290] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c93fecf-a3df-49ed-8cb4-e658bc1b3456 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.911284] env[62585]: DEBUG nova.compute.provider_tree [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.115193] env[62585]: DEBUG nova.network.neutron [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Updating instance_info_cache with network_info: [] {{(pid=62585) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.139851] env[62585]: INFO nova.compute.manager [-] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Took 1.02 seconds to deallocate network for instance. [ 634.239267] env[62585]: DEBUG nova.compute.manager [req-d99aa78c-fbda-44e9-802e-9578edd3e299 req-507cbbd1-7314-4ec0-968e-f3eb2b8ec7d5 service nova] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Received event network-changed-c69c9a27-7311-4a48-a33e-d1b8f1a78b15 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 634.239479] env[62585]: DEBUG nova.compute.manager [req-d99aa78c-fbda-44e9-802e-9578edd3e299 req-507cbbd1-7314-4ec0-968e-f3eb2b8ec7d5 service nova] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Refreshing instance network info cache due to event network-changed-c69c9a27-7311-4a48-a33e-d1b8f1a78b15. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 634.239699] env[62585]: DEBUG oslo_concurrency.lockutils [req-d99aa78c-fbda-44e9-802e-9578edd3e299 req-507cbbd1-7314-4ec0-968e-f3eb2b8ec7d5 service nova] Acquiring lock "refresh_cache-14557f1a-2410-4201-9b91-49d23f18d47a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.239819] env[62585]: DEBUG oslo_concurrency.lockutils [req-d99aa78c-fbda-44e9-802e-9578edd3e299 req-507cbbd1-7314-4ec0-968e-f3eb2b8ec7d5 service nova] Acquired lock "refresh_cache-14557f1a-2410-4201-9b91-49d23f18d47a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.240008] env[62585]: DEBUG nova.network.neutron [req-d99aa78c-fbda-44e9-802e-9578edd3e299 req-507cbbd1-7314-4ec0-968e-f3eb2b8ec7d5 service nova] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Refreshing network info cache for port c69c9a27-7311-4a48-a33e-d1b8f1a78b15 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 634.345333] env[62585]: DEBUG nova.network.neutron [-] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.414633] env[62585]: DEBUG nova.scheduler.client.report [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 634.617712] env[62585]: INFO nova.compute.manager [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] [instance: 29f9e25a-a0b2-4bb8-b59a-3617819d3be5] Took 1.03 seconds to deallocate network for instance. 
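The scheduler report just above states that inventory for provider 66db9ec1-b5c3-45d2-a885-8e338110656b is unchanged and echoes the inventory dict. For reference, the schedulable capacity Placement derives from such a record is (total - reserved) * allocation_ratio per resource class; a quick check with the values logged above (min_unit/max_unit/step_size omitted, since they affect allocation granularity rather than total capacity):

    # Capacity implied by the inventory dict in the report above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for resource_class, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(resource_class, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

So this node advertises 192 VCPU, 196078 MB of RAM and 400 GB of disk to the scheduler, which is consistent with the resource claims succeeding later in this section.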
[ 634.699922] env[62585]: INFO nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Took 0.56 seconds to detach 1 volumes for instance. [ 634.702210] env[62585]: DEBUG nova.compute.claims [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 634.702412] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.757487] env[62585]: DEBUG nova.network.neutron [req-d99aa78c-fbda-44e9-802e-9578edd3e299 req-507cbbd1-7314-4ec0-968e-f3eb2b8ec7d5 service nova] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 634.837444] env[62585]: DEBUG nova.network.neutron [req-d99aa78c-fbda-44e9-802e-9578edd3e299 req-507cbbd1-7314-4ec0-968e-f3eb2b8ec7d5 service nova] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.847479] env[62585]: INFO nova.compute.manager [-] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Took 1.03 seconds to deallocate network for instance. [ 634.849436] env[62585]: DEBUG nova.compute.claims [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 634.849611] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.919615] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.408s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.920147] env[62585]: DEBUG nova.compute.manager [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 634.922576] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.442s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.924676] env[62585]: INFO nova.compute.claims [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 635.339927] env[62585]: DEBUG oslo_concurrency.lockutils [req-d99aa78c-fbda-44e9-802e-9578edd3e299 req-507cbbd1-7314-4ec0-968e-f3eb2b8ec7d5 service nova] Releasing lock "refresh_cache-14557f1a-2410-4201-9b91-49d23f18d47a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.340153] env[62585]: DEBUG nova.compute.manager [req-d99aa78c-fbda-44e9-802e-9578edd3e299 req-507cbbd1-7314-4ec0-968e-f3eb2b8ec7d5 service nova] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Received event network-vif-deleted-c69c9a27-7311-4a48-a33e-d1b8f1a78b15 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 635.429389] env[62585]: DEBUG nova.compute.utils [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 635.433080] env[62585]: DEBUG nova.compute.manager [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Not allocating networking since 'none' was specified. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 635.654610] env[62585]: INFO nova.scheduler.client.report [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Deleted allocations for instance 29f9e25a-a0b2-4bb8-b59a-3617819d3be5 [ 635.933412] env[62585]: DEBUG nova.compute.manager [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 636.165466] env[62585]: DEBUG oslo_concurrency.lockutils [None req-302f3c9e-0494-4952-8623-1aa0274af071 tempest-DeleteServersAdminTestJSON-2135871450 tempest-DeleteServersAdminTestJSON-2135871450-project-member] Lock "29f9e25a-a0b2-4bb8-b59a-3617819d3be5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.874s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.199406] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48ab6c3-a1a4-4145-9368-c6f5a9823149 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.207079] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84cddb3-a59c-4545-b5dd-fb4568f00648 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.235737] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09304b07-917b-4210-95ed-48cc9e2a840c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.242597] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f403c9c1-5340-4422-b841-e7f514d9a0b2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.255251] env[62585]: DEBUG nova.compute.provider_tree [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.669042] env[62585]: DEBUG nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 636.758624] env[62585]: DEBUG nova.scheduler.client.report [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 636.947044] env[62585]: DEBUG nova.compute.manager [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 636.972880] env[62585]: DEBUG nova.virt.hardware [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 636.973148] env[62585]: DEBUG nova.virt.hardware [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 636.973363] env[62585]: DEBUG nova.virt.hardware [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 636.973479] env[62585]: DEBUG nova.virt.hardware [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 636.973625] env[62585]: DEBUG nova.virt.hardware [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 636.973800] env[62585]: DEBUG nova.virt.hardware [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 636.974124] env[62585]: DEBUG nova.virt.hardware [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 636.974283] env[62585]: DEBUG nova.virt.hardware [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 636.974451] env[62585]: DEBUG nova.virt.hardware [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 
tempest-ServerShowV247Test-165934211-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 636.974609] env[62585]: DEBUG nova.virt.hardware [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 636.974777] env[62585]: DEBUG nova.virt.hardware [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 636.975695] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0af292-eba7-4273-81ec-fd73f0c18f3b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.984543] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f380f62-78f3-4895-8435-a9be2ca60bf6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.999766] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Instance VIF info [] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 637.005888] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Creating folder: Project (ad2a462c51a84b03b4cbc2a591b71f2d). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 637.006206] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a66db0a1-1b3a-4997-9169-c33d4cc30d7b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.018057] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Created folder: Project (ad2a462c51a84b03b4cbc2a591b71f2d) in parent group-v293962. [ 637.018454] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Creating folder: Instances. Parent ref: group-v293983. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 637.018790] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-987dda8b-03a4-4285-a4ae-38d8304b35f8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.027640] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Created folder: Instances in parent group-v293983. 
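The Folder.CreateFolder invocations above and the Folder.CreateVM_Task with its "Waiting for the task ... progress is 0%" lines that follow are the standard oslo.vmware call-and-poll pattern. A hedged sketch of that pattern; the helper, host, credentials and managed object references below are placeholders, not values from this deployment:

    from oslo_vmware import api


    def create_vm(session, vm_folder, config_spec, resource_pool):
        """Issue CreateVM_Task and block until vCenter reports the task done."""
        # invoke_api() performs the SOAP call (the "Invoking Folder.CreateVM_Task"
        # DEBUG line); wait_for_task() polls the task until it completes (the
        # "Waiting for the task ... progress is 0%" lines).
        task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                                  config=config_spec, pool=resource_pool)
        return session.wait_for_task(task)


    # Usage sketch:
    #   session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
    #                                  api_retry_count=10, task_poll_interval=0.5)
    #   task_info = create_vm(session, vm_folder_ref, config_spec, res_pool_ref)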
[ 637.027727] env[62585]: DEBUG oslo.service.loopingcall [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 637.027866] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 637.028088] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8048d337-e2ba-49a8-81ca-2af75ea06b2e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.044868] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 637.044868] env[62585]: value = "task-1384665" [ 637.044868] env[62585]: _type = "Task" [ 637.044868] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.051687] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384665, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.191119] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.263577] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.341s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.264381] env[62585]: DEBUG nova.compute.manager [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 637.267472] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.751s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.269175] env[62585]: INFO nova.compute.claims [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 637.556008] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384665, 'name': CreateVM_Task, 'duration_secs': 0.247793} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.556201] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 637.556631] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.556789] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.557194] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 637.557450] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f15641f-8d82-4a63-98cc-678ea6d410d6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.562137] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 637.562137] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e5b0d3-34b7-fb9f-21c4-749bcf0f644f" [ 637.562137] env[62585]: _type = "Task" [ 637.562137] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.570428] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e5b0d3-34b7-fb9f-21c4-749bcf0f644f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.774659] env[62585]: DEBUG nova.compute.utils [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 637.776046] env[62585]: DEBUG nova.compute.manager [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Not allocating networking since 'none' was specified. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 638.072119] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e5b0d3-34b7-fb9f-21c4-749bcf0f644f, 'name': SearchDatastore_Task, 'duration_secs': 0.012202} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.072424] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.072656] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 638.072895] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.073056] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.073236] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 638.073489] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b156105-2388-42ca-9ab2-fe0ab836f30c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.081070] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 638.081254] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 638.081924] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15e5a040-d629-4df4-9f62-0a67ac5f7918 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.086987] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 638.086987] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f8e11d-157b-ebe8-e728-a0f1efa05fdf" [ 638.086987] env[62585]: _type = "Task" [ 638.086987] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.094490] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f8e11d-157b-ebe8-e728-a0f1efa05fdf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.279517] env[62585]: DEBUG nova.compute.manager [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 638.597665] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f8e11d-157b-ebe8-e728-a0f1efa05fdf, 'name': SearchDatastore_Task, 'duration_secs': 0.008267} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.600531] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc1b29ad-a9ad-41d7-8a06-13beb42a00e5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.605479] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 638.605479] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5260655b-fe72-7b38-89ea-7b4e603cd048" [ 638.605479] env[62585]: _type = "Task" [ 638.605479] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.614785] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5260655b-fe72-7b38-89ea-7b4e603cd048, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.628276] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52cf903-059d-4a09-b5a6-7925307d8a70 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.634533] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56b640f-cc50-41db-9b42-97a2478eccd9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.663937] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf238402-6c5e-4d37-91a4-86100cbdfa46 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.671034] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ace979-33e9-415b-b7e1-68e08e4735fe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.684126] env[62585]: DEBUG nova.compute.provider_tree [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.116383] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5260655b-fe72-7b38-89ea-7b4e603cd048, 'name': SearchDatastore_Task, 'duration_secs': 0.00943} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.116763] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.116926] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 16f01d66-44f8-4912-989a-48c39f667c95/16f01d66-44f8-4912-989a-48c39f667c95.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 639.117180] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67e10639-8879-4bea-8a6a-eb4cd3df4185 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.124308] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 639.124308] env[62585]: value = "task-1384666" [ 639.124308] env[62585]: _type = "Task" [ 639.124308] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.133428] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384666, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.166766] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Acquiring lock "84367bf5-0f74-43c5-b49c-e0f4dde5b1d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.167110] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Lock "84367bf5-0f74-43c5-b49c-e0f4dde5b1d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.187383] env[62585]: DEBUG nova.scheduler.client.report [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 639.291449] env[62585]: DEBUG nova.compute.manager [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 639.319578] env[62585]: DEBUG nova.virt.hardware [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 639.319901] env[62585]: DEBUG nova.virt.hardware [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 639.320145] env[62585]: DEBUG nova.virt.hardware [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 639.320425] env[62585]: DEBUG nova.virt.hardware [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 639.320588] env[62585]: DEBUG nova.virt.hardware [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 639.320738] env[62585]: DEBUG nova.virt.hardware [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 639.320946] env[62585]: DEBUG nova.virt.hardware [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 639.321121] env[62585]: DEBUG nova.virt.hardware [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 639.321298] env[62585]: DEBUG nova.virt.hardware [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 
tempest-ServerShowV247Test-165934211-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 639.321453] env[62585]: DEBUG nova.virt.hardware [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 639.321625] env[62585]: DEBUG nova.virt.hardware [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 639.322554] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cae1af3-5eab-4e8a-b6e3-cfce0078524b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.331213] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9091ceab-0fa7-4109-bd6b-8fc7651c2483 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.347374] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Instance VIF info [] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 639.353071] env[62585]: DEBUG oslo.service.loopingcall [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 639.353354] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 639.353574] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-307811cd-7f00-47f5-a5d1-7a82c7b2fa7d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.370683] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 639.370683] env[62585]: value = "task-1384667" [ 639.370683] env[62585]: _type = "Task" [ 639.370683] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.378958] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384667, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.635848] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384666, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502966} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.636320] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 16f01d66-44f8-4912-989a-48c39f667c95/16f01d66-44f8-4912-989a-48c39f667c95.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 639.636564] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 639.636830] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ff2cc000-f7b9-4826-b658-dd22d30500a2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.642634] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 639.642634] env[62585]: value = "task-1384668" [ 639.642634] env[62585]: _type = "Task" [ 639.642634] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.651367] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384668, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.692953] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.425s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.693503] env[62585]: DEBUG nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 639.696893] env[62585]: DEBUG oslo_concurrency.lockutils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.269s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.698545] env[62585]: INFO nova.compute.claims [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 639.881867] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384667, 'name': CreateVM_Task, 'duration_secs': 0.348914} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.881867] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 639.882605] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.882605] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.882605] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 639.882866] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6be4a1ec-9ea3-4a98-8660-f5b9b1cd011b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.887434] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 639.887434] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b51a59-c811-a05c-11f4-6ec17433e8da" [ 639.887434] env[62585]: _type = "Task" [ 639.887434] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.894867] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b51a59-c811-a05c-11f4-6ec17433e8da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.152392] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384668, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081996} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.152737] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 640.153498] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f193cd79-b3ff-4983-8bff-e815fc4094fe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.172825] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 16f01d66-44f8-4912-989a-48c39f667c95/16f01d66-44f8-4912-989a-48c39f667c95.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 640.173094] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ba887df-c8a6-4a98-b8b9-29dffb6785f6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.191660] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 640.191660] env[62585]: value = "task-1384669" [ 640.191660] env[62585]: _type = "Task" [ 640.191660] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.199265] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384669, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.205311] env[62585]: DEBUG nova.compute.utils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 640.206461] env[62585]: DEBUG nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 640.207277] env[62585]: DEBUG nova.network.neutron [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 640.245540] env[62585]: DEBUG nova.policy [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a98e802c78964797981d96df67f57b77', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4cf959f5187042d3bde65fb404737d32', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 640.399294] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b51a59-c811-a05c-11f4-6ec17433e8da, 'name': SearchDatastore_Task, 'duration_secs': 0.009281} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.399600] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.399846] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 640.400091] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.400240] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.400416] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 640.400682] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f48e9549-bb47-4a16-9ccc-d2f05ad609e7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.408549] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 640.408720] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 640.409394] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0831fd29-c194-4305-8862-1521882e5c78 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.414167] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 640.414167] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]528ed35c-3672-a1ba-9f7b-563a7632cbfd" [ 640.414167] env[62585]: _type = "Task" [ 640.414167] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.421452] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]528ed35c-3672-a1ba-9f7b-563a7632cbfd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.514156] env[62585]: DEBUG nova.network.neutron [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Successfully created port: c5717a5c-e85f-489e-a493-1d22bc3d80a0 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 640.701387] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384669, 'name': ReconfigVM_Task, 'duration_secs': 0.320845} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.701654] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 16f01d66-44f8-4912-989a-48c39f667c95/16f01d66-44f8-4912-989a-48c39f667c95.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 640.702249] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-330fc7ed-0be4-4afe-83ef-10a08cec579b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.713051] env[62585]: DEBUG nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 640.715804] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 640.715804] env[62585]: value = "task-1384670" [ 640.715804] env[62585]: _type = "Task" [ 640.715804] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.724411] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384670, 'name': Rename_Task} progress is 10%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.930990] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]528ed35c-3672-a1ba-9f7b-563a7632cbfd, 'name': SearchDatastore_Task, 'duration_secs': 0.008093} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.934389] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d97fe1a5-a237-4117-a21a-c004933e4897 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.940656] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 640.940656] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5253b7e8-5034-e2ac-4f91-8bd26def2e90" [ 640.940656] env[62585]: _type = "Task" [ 640.940656] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.951570] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5253b7e8-5034-e2ac-4f91-8bd26def2e90, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.081844] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8811dcec-ad49-4363-8d90-84457b5d47ee {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.089349] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0508b9b8-30e9-4598-8185-70b52a1d644a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.121483] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b0ef10-c8f9-442a-8cbe-39edfa1fd506 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.133294] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7873a7-2070-4ac2-a25f-3233c33b9c3f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.152411] env[62585]: DEBUG nova.compute.provider_tree [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.231528] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384670, 'name': Rename_Task, 'duration_secs': 0.130349} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.231790] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 641.232042] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4b153e2-231b-4221-963c-3df1059ad329 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.243382] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 641.243382] env[62585]: value = "task-1384671" [ 641.243382] env[62585]: _type = "Task" [ 641.243382] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.249567] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384671, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.345426] env[62585]: DEBUG nova.compute.manager [req-2cc5dc67-fc74-4ac3-bb49-f0211309ae5b req-bc9805c8-bca6-4136-a990-7a173428241f service nova] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Received event network-changed-c5717a5c-e85f-489e-a493-1d22bc3d80a0 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 641.345591] env[62585]: DEBUG nova.compute.manager [req-2cc5dc67-fc74-4ac3-bb49-f0211309ae5b req-bc9805c8-bca6-4136-a990-7a173428241f service nova] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Refreshing instance network info cache due to event network-changed-c5717a5c-e85f-489e-a493-1d22bc3d80a0. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 641.345643] env[62585]: DEBUG oslo_concurrency.lockutils [req-2cc5dc67-fc74-4ac3-bb49-f0211309ae5b req-bc9805c8-bca6-4136-a990-7a173428241f service nova] Acquiring lock "refresh_cache-3695a09f-dffc-4537-ac78-faffd6bdd252" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.345773] env[62585]: DEBUG oslo_concurrency.lockutils [req-2cc5dc67-fc74-4ac3-bb49-f0211309ae5b req-bc9805c8-bca6-4136-a990-7a173428241f service nova] Acquired lock "refresh_cache-3695a09f-dffc-4537-ac78-faffd6bdd252" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.345937] env[62585]: DEBUG nova.network.neutron [req-2cc5dc67-fc74-4ac3-bb49-f0211309ae5b req-bc9805c8-bca6-4136-a990-7a173428241f service nova] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Refreshing network info cache for port c5717a5c-e85f-489e-a493-1d22bc3d80a0 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 641.452034] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5253b7e8-5034-e2ac-4f91-8bd26def2e90, 'name': SearchDatastore_Task, 'duration_secs': 0.009206} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.452034] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.452034] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 2cf0927d-8d98-4554-92ce-c049e1ea179c/2cf0927d-8d98-4554-92ce-c049e1ea179c.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 641.452321] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cdba7ce9-71bd-46a3-a0b9-70c70cdc43cd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.458371] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 641.458371] env[62585]: value = "task-1384672" [ 641.458371] env[62585]: _type = "Task" [ 641.458371] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.466324] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.505389] env[62585]: ERROR nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c5717a5c-e85f-489e-a493-1d22bc3d80a0, please check neutron logs for more information. 
[ 641.505389] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 641.505389] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 641.505389] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 641.505389] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 641.505389] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 641.505389] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 641.505389] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 641.505389] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 641.505389] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 641.505389] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 641.505389] env[62585]: ERROR nova.compute.manager raise self.value [ 641.505389] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 641.505389] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 641.505389] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 641.505389] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 641.505804] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 641.505804] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 641.505804] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c5717a5c-e85f-489e-a493-1d22bc3d80a0, please check neutron logs for more information. 
[ 641.505804] env[62585]: ERROR nova.compute.manager [ 641.505804] env[62585]: Traceback (most recent call last): [ 641.505804] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 641.505804] env[62585]: listener.cb(fileno) [ 641.505804] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 641.505804] env[62585]: result = function(*args, **kwargs) [ 641.505804] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 641.505804] env[62585]: return func(*args, **kwargs) [ 641.505804] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 641.505804] env[62585]: raise e [ 641.505804] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 641.505804] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 641.505804] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 641.505804] env[62585]: created_port_ids = self._update_ports_for_instance( [ 641.505804] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 641.505804] env[62585]: with excutils.save_and_reraise_exception(): [ 641.505804] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 641.505804] env[62585]: self.force_reraise() [ 641.505804] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 641.505804] env[62585]: raise self.value [ 641.505804] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 641.505804] env[62585]: updated_port = self._update_port( [ 641.505804] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 641.505804] env[62585]: _ensure_no_port_binding_failure(port) [ 641.505804] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 641.505804] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 641.506701] env[62585]: nova.exception.PortBindingFailed: Binding failed for port c5717a5c-e85f-489e-a493-1d22bc3d80a0, please check neutron logs for more information. [ 641.506701] env[62585]: Removing descriptor: 17 [ 641.654707] env[62585]: DEBUG nova.scheduler.client.report [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 641.723798] env[62585]: DEBUG nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 641.750910] env[62585]: DEBUG nova.virt.hardware [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 641.751256] env[62585]: DEBUG nova.virt.hardware [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 641.751509] env[62585]: DEBUG nova.virt.hardware [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 641.751509] env[62585]: DEBUG nova.virt.hardware [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 641.751679] env[62585]: DEBUG nova.virt.hardware [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 641.751804] env[62585]: DEBUG nova.virt.hardware [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 641.752017] env[62585]: DEBUG nova.virt.hardware [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 641.752240] env[62585]: DEBUG nova.virt.hardware [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 641.752399] env[62585]: DEBUG nova.virt.hardware [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 641.752562] env[62585]: DEBUG nova.virt.hardware [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 641.752780] env[62585]: DEBUG nova.virt.hardware [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 641.753658] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776c85b6-afdc-45a5-9992-cb6cd2a1a5cb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.760068] env[62585]: DEBUG oslo_vmware.api [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384671, 'name': PowerOnVM_Task, 'duration_secs': 0.504855} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.760536] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 641.760742] env[62585]: INFO nova.compute.manager [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Took 4.81 seconds to spawn the instance on the hypervisor. 
[ 641.760921] env[62585]: DEBUG nova.compute.manager [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 641.761712] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17011ad-5a78-4d47-8c8c-d7f49aa77160 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.767942] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6d87c1-1682-4476-8ba8-cbcac7b202c6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.786949] env[62585]: ERROR nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c5717a5c-e85f-489e-a493-1d22bc3d80a0, please check neutron logs for more information. [ 641.786949] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Traceback (most recent call last): [ 641.786949] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 641.786949] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] yield resources [ 641.786949] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 641.786949] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] self.driver.spawn(context, instance, image_meta, [ 641.786949] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 641.786949] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] self._vmops.spawn(context, instance, image_meta, injected_files, [ 641.786949] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 641.786949] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] vm_ref = self.build_virtual_machine(instance, [ 641.786949] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 641.787369] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] vif_infos = vmwarevif.get_vif_info(self._session, [ 641.787369] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 641.787369] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] for vif in network_info: [ 641.787369] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/model.py", line 614, in 
__iter__ [ 641.787369] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] return self._sync_wrapper(fn, *args, **kwargs) [ 641.787369] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 641.787369] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] self.wait() [ 641.787369] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 641.787369] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] self[:] = self._gt.wait() [ 641.787369] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 641.787369] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] return self._exit_event.wait() [ 641.787369] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 641.787369] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] current.throw(*self._exc) [ 641.787787] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 641.787787] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] result = function(*args, **kwargs) [ 641.787787] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 641.787787] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] return func(*args, **kwargs) [ 641.787787] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 641.787787] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] raise e [ 641.787787] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 641.787787] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] nwinfo = self.network_api.allocate_for_instance( [ 641.787787] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 641.787787] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] created_port_ids = self._update_ports_for_instance( [ 641.787787] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 641.787787] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] with excutils.save_and_reraise_exception(): [ 641.787787] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in 
__exit__ [ 641.788240] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] self.force_reraise() [ 641.788240] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 641.788240] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] raise self.value [ 641.788240] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 641.788240] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] updated_port = self._update_port( [ 641.788240] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 641.788240] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] _ensure_no_port_binding_failure(port) [ 641.788240] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 641.788240] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] raise exception.PortBindingFailed(port_id=port['id']) [ 641.788240] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] nova.exception.PortBindingFailed: Binding failed for port c5717a5c-e85f-489e-a493-1d22bc3d80a0, please check neutron logs for more information. [ 641.788240] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] [ 641.788240] env[62585]: INFO nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Terminating instance [ 641.789663] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Acquiring lock "refresh_cache-3695a09f-dffc-4537-ac78-faffd6bdd252" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.875084] env[62585]: DEBUG nova.network.neutron [req-2cc5dc67-fc74-4ac3-bb49-f0211309ae5b req-bc9805c8-bca6-4136-a990-7a173428241f service nova] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 641.968276] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384672, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.438572} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.968575] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 2cf0927d-8d98-4554-92ce-c049e1ea179c/2cf0927d-8d98-4554-92ce-c049e1ea179c.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 641.968833] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 641.969102] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d37de8c3-7c54-4795-a5fd-75ae3407e299 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.973943] env[62585]: DEBUG nova.network.neutron [req-2cc5dc67-fc74-4ac3-bb49-f0211309ae5b req-bc9805c8-bca6-4136-a990-7a173428241f service nova] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.976431] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 641.976431] env[62585]: value = "task-1384673" [ 641.976431] env[62585]: _type = "Task" [ 641.976431] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.983448] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384673, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.160139] env[62585]: DEBUG oslo_concurrency.lockutils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.463s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.160710] env[62585]: DEBUG nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 642.163459] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.568s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.165021] env[62585]: INFO nova.compute.claims [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.283810] env[62585]: INFO nova.compute.manager [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Took 26.77 seconds to build instance. [ 642.477924] env[62585]: DEBUG oslo_concurrency.lockutils [req-2cc5dc67-fc74-4ac3-bb49-f0211309ae5b req-bc9805c8-bca6-4136-a990-7a173428241f service nova] Releasing lock "refresh_cache-3695a09f-dffc-4537-ac78-faffd6bdd252" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.478064] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Acquired lock "refresh_cache-3695a09f-dffc-4537-ac78-faffd6bdd252" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.478206] env[62585]: DEBUG nova.network.neutron [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 642.487693] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384673, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059481} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.488459] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 642.489267] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10114220-925b-4448-9f59-709ca31ea981 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.508841] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 2cf0927d-8d98-4554-92ce-c049e1ea179c/2cf0927d-8d98-4554-92ce-c049e1ea179c.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 642.509320] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-306c9a75-2eb4-4876-b378-a3c07153d371 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.529134] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 642.529134] env[62585]: value = "task-1384674" [ 642.529134] env[62585]: _type = "Task" [ 642.529134] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.536859] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384674, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.670069] env[62585]: DEBUG nova.compute.utils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 642.672926] env[62585]: DEBUG nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 642.673113] env[62585]: DEBUG nova.network.neutron [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 642.724906] env[62585]: DEBUG nova.policy [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '177858a0aeaa476091e8d98f163a4b42', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8469245fff25492d9688e13dc65f7d9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 642.785874] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2bb218c2-70c5-42d1-a999-1f2383762b96 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "16f01d66-44f8-4912-989a-48c39f667c95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.151s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.000891] env[62585]: DEBUG nova.network.neutron [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 643.041872] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384674, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.047459] env[62585]: DEBUG nova.network.neutron [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Successfully created port: 34e0888f-ed4b-4330-9155-39789e4652d1 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 643.098111] env[62585]: DEBUG nova.network.neutron [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.179650] env[62585]: DEBUG nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 643.288894] env[62585]: DEBUG nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 643.393270] env[62585]: DEBUG nova.compute.manager [req-40e64475-ee3d-4244-b873-29ae2f48ad55 req-00a9fcdc-146f-4e05-9407-159e2ca9a1c3 service nova] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Received event network-vif-deleted-c5717a5c-e85f-489e-a493-1d22bc3d80a0 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 643.540126] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384674, 'name': ReconfigVM_Task, 'duration_secs': 0.964372} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.540348] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 2cf0927d-8d98-4554-92ce-c049e1ea179c/2cf0927d-8d98-4554-92ce-c049e1ea179c.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 643.541018] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-003568f7-1add-4033-bd3f-e0aa36fe0653 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.546717] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a35bca-dc1f-4597-9154-25bc0cbaf7e9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.550020] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 643.550020] env[62585]: value = "task-1384675" [ 643.550020] env[62585]: _type = "Task" [ 643.550020] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.555714] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80bd8827-75af-43d4-af6c-8daaa9fc4040 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.561504] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384675, 'name': Rename_Task} progress is 6%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.589710] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddf39e0-f3a1-4db4-83b3-cf360a807b17 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.597146] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363eaf40-7083-4c86-9080-dd9d22a3a390 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.601188] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Releasing lock "refresh_cache-3695a09f-dffc-4537-ac78-faffd6bdd252" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.601572] env[62585]: DEBUG nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 643.601762] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 643.602009] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f658dfa2-a50e-4bcd-9e10-75f06a315ccb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.613257] env[62585]: DEBUG nova.compute.provider_tree [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.618295] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a69917d-c24e-4c7a-9e4c-a29e4b829a32 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.642538] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3695a09f-dffc-4537-ac78-faffd6bdd252 could not be found. 
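Annotation on the spawn failure above: the traceback for instance 3695a09f-dffc-4537-ac78-faffd6bdd252 shows `_update_port` in nova/network/neutron.py calling `_ensure_no_port_binding_failure(port)` (line 294), which raises `PortBindingFailed` for port c5717a5c-e85f-489e-a493-1d22bc3d80a0. Below is a minimal, self-contained sketch of that guard. Only the function name and the raised exception (with its message) come from the log; the check against a `binding:vif_type` of `'binding_failed'` is an assumption about how Neutron reports the failed binding, not a quote of the Nova source.

# Sketch of the guard seen at nova/network/neutron.py:294 in the traceback above.
# Assumption: Neutron marks a failed binding by setting the port's
# 'binding:vif_type' to 'binding_failed'; only the function name and the
# PortBindingFailed exception are confirmed by the log itself.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def _ensure_no_port_binding_failure(port):
    # 'port' is the dict returned by the Neutron API for the updated port.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Usage: a port whose binding failed on the Neutron side.
try:
    _ensure_no_port_binding_failure(
        {'id': 'c5717a5c-e85f-489e-a493-1d22bc3d80a0',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)  # same message that appears in the ERROR lines above
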
[ 643.642758] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 643.642934] env[62585]: INFO nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Took 0.04 seconds to destroy the instance on the hypervisor. [ 643.643190] env[62585]: DEBUG oslo.service.loopingcall [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 643.643428] env[62585]: DEBUG nova.compute.manager [-] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 643.643515] env[62585]: DEBUG nova.network.neutron [-] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 643.660171] env[62585]: DEBUG nova.network.neutron [-] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 643.806867] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.025072] env[62585]: ERROR nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 34e0888f-ed4b-4330-9155-39789e4652d1, please check neutron logs for more information. 
[ 644.025072] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 644.025072] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 644.025072] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 644.025072] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 644.025072] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 644.025072] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 644.025072] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 644.025072] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 644.025072] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 644.025072] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 644.025072] env[62585]: ERROR nova.compute.manager raise self.value [ 644.025072] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 644.025072] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 644.025072] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 644.025072] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 644.025536] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 644.025536] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 644.025536] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 34e0888f-ed4b-4330-9155-39789e4652d1, please check neutron logs for more information. 
[ 644.025536] env[62585]: ERROR nova.compute.manager [ 644.025536] env[62585]: Traceback (most recent call last): [ 644.025536] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 644.025536] env[62585]: listener.cb(fileno) [ 644.025536] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 644.025536] env[62585]: result = function(*args, **kwargs) [ 644.025536] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 644.025536] env[62585]: return func(*args, **kwargs) [ 644.025536] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 644.025536] env[62585]: raise e [ 644.025536] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 644.025536] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 644.025536] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 644.025536] env[62585]: created_port_ids = self._update_ports_for_instance( [ 644.025536] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 644.025536] env[62585]: with excutils.save_and_reraise_exception(): [ 644.025536] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 644.025536] env[62585]: self.force_reraise() [ 644.025536] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 644.025536] env[62585]: raise self.value [ 644.025536] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 644.025536] env[62585]: updated_port = self._update_port( [ 644.025536] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 644.025536] env[62585]: _ensure_no_port_binding_failure(port) [ 644.025536] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 644.025536] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 644.026252] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 34e0888f-ed4b-4330-9155-39789e4652d1, please check neutron logs for more information. [ 644.026252] env[62585]: Removing descriptor: 17 [ 644.061325] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384675, 'name': Rename_Task, 'duration_secs': 0.173727} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.061593] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 644.061854] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d75ea19c-f6ae-46fa-b68a-82a193683c00 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.068544] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 644.068544] env[62585]: value = "task-1384676" [ 644.068544] env[62585]: _type = "Task" [ 644.068544] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.076117] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384676, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.117407] env[62585]: DEBUG nova.scheduler.client.report [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 644.161143] env[62585]: DEBUG nova.network.neutron [-] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.193341] env[62585]: DEBUG nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 644.227874] env[62585]: DEBUG nova.virt.hardware [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 644.228133] env[62585]: DEBUG nova.virt.hardware [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 644.228292] env[62585]: DEBUG nova.virt.hardware [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 644.228473] env[62585]: DEBUG nova.virt.hardware [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 644.228615] env[62585]: DEBUG nova.virt.hardware [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 644.228752] env[62585]: DEBUG nova.virt.hardware [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 644.228951] env[62585]: DEBUG nova.virt.hardware [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 644.229124] env[62585]: DEBUG nova.virt.hardware [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 644.229293] env[62585]: DEBUG 
nova.virt.hardware [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 644.229461] env[62585]: DEBUG nova.virt.hardware [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 644.229618] env[62585]: DEBUG nova.virt.hardware [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 644.230488] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c3d242-47a5-4d93-8fb1-5432b97aec40 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.239021] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c453f4-96e2-4c17-880d-064760cee266 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.252843] env[62585]: ERROR nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 34e0888f-ed4b-4330-9155-39789e4652d1, please check neutron logs for more information. 
[ 644.252843] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Traceback (most recent call last): [ 644.252843] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 644.252843] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] yield resources [ 644.252843] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 644.252843] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] self.driver.spawn(context, instance, image_meta, [ 644.252843] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 644.252843] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] self._vmops.spawn(context, instance, image_meta, injected_files, [ 644.252843] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 644.252843] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] vm_ref = self.build_virtual_machine(instance, [ 644.252843] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 644.253324] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] vif_infos = vmwarevif.get_vif_info(self._session, [ 644.253324] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 644.253324] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] for vif in network_info: [ 644.253324] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 644.253324] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] return self._sync_wrapper(fn, *args, **kwargs) [ 644.253324] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 644.253324] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] self.wait() [ 644.253324] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 644.253324] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] self[:] = self._gt.wait() [ 644.253324] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 644.253324] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] return self._exit_event.wait() [ 644.253324] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 644.253324] env[62585]: ERROR 
nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] current.throw(*self._exc) [ 644.253765] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 644.253765] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] result = function(*args, **kwargs) [ 644.253765] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 644.253765] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] return func(*args, **kwargs) [ 644.253765] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 644.253765] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] raise e [ 644.253765] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 644.253765] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] nwinfo = self.network_api.allocate_for_instance( [ 644.253765] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 644.253765] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] created_port_ids = self._update_ports_for_instance( [ 644.253765] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 644.253765] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] with excutils.save_and_reraise_exception(): [ 644.253765] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 644.254240] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] self.force_reraise() [ 644.254240] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 644.254240] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] raise self.value [ 644.254240] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 644.254240] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] updated_port = self._update_port( [ 644.254240] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 644.254240] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] _ensure_no_port_binding_failure(port) [ 644.254240] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
644.254240] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] raise exception.PortBindingFailed(port_id=port['id']) [ 644.254240] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] nova.exception.PortBindingFailed: Binding failed for port 34e0888f-ed4b-4330-9155-39789e4652d1, please check neutron logs for more information. [ 644.254240] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] [ 644.254240] env[62585]: INFO nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Terminating instance [ 644.255398] env[62585]: DEBUG oslo_concurrency.lockutils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Acquiring lock "refresh_cache-86e3d197-2e8c-4357-ac0a-e1af8e247024" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.255659] env[62585]: DEBUG oslo_concurrency.lockutils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Acquired lock "refresh_cache-86e3d197-2e8c-4357-ac0a-e1af8e247024" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.255756] env[62585]: DEBUG nova.network.neutron [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 644.578558] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384676, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.622748] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.623399] env[62585]: DEBUG nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 644.626046] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.703s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.664814] env[62585]: INFO nova.compute.manager [-] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Took 1.02 seconds to deallocate network for instance. [ 644.666972] env[62585]: DEBUG nova.compute.claims [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 644.667233] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.773674] env[62585]: DEBUG nova.network.neutron [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 644.856681] env[62585]: DEBUG nova.network.neutron [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.080882] env[62585]: DEBUG oslo_vmware.api [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384676, 'name': PowerOnVM_Task, 'duration_secs': 0.797714} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.081227] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 645.081437] env[62585]: INFO nova.compute.manager [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Took 5.79 seconds to spawn the instance on the hypervisor. 
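Annotation on the VMware task lines above: the recurring `Waiting for the task: (returnval){ value = "task-…" }`, `progress is N%`, and `completed successfully` entries come from oslo.vmware's task polling (`wait_for_task` at api.py:397 and `_poll_task` at api.py:434/444 in the log trailers). The sketch below reproduces only the shape of that loop; `FakeTask` and its `get_task_info()` accessor are hypothetical stand-ins, not the oslo.vmware API, and the real objects in the log are SOAP task references returned by vCenter.

import time

class FakeTask:
    """Hypothetical stand-in for a vCenter task handle; models only the
    fields the log lines print: a name, a state and a progress percent."""
    def __init__(self, name, steps=3):
        self.name = name
        self._steps = steps
        self._done = 0

    def get_task_info(self):
        self._done = min(self._done + 1, self._steps)
        state = "success" if self._done == self._steps else "running"
        return {"name": self.name,
                "state": state,
                "progress": int(100 * self._done / self._steps)}


def wait_for_task(task, interval=0.5):
    # Poll until the task reaches a terminal state, reporting progress
    # and total duration, mirroring the wait_for_task/_poll_task entries.
    start = time.monotonic()
    while True:
        info = task.get_task_info()
        if info["state"] == "success":
            print(f"Task {info['name']} completed successfully "
                  f"in {time.monotonic() - start:.3f}s")
            return info
        if info["state"] == "error":
            raise RuntimeError(f"Task {info['name']} failed")
        print(f"Task {info['name']} progress is {info['progress']}%")
        time.sleep(interval)


wait_for_task(FakeTask("ExtendVirtualDisk_Task"))
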
[ 645.081611] env[62585]: DEBUG nova.compute.manager [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 645.082400] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af00bd42-3624-4551-b6d1-14642a707d1b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.129957] env[62585]: DEBUG nova.compute.utils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 645.134857] env[62585]: DEBUG nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 645.135046] env[62585]: DEBUG nova.network.neutron [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 645.200615] env[62585]: DEBUG nova.policy [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9b6d211a90ac47d3acc6ec5e626b4a83', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '029cbe525d8e4387ba9a47f387e120ec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 645.359207] env[62585]: DEBUG oslo_concurrency.lockutils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Releasing lock "refresh_cache-86e3d197-2e8c-4357-ac0a-e1af8e247024" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.359636] env[62585]: DEBUG nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 645.359831] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 645.362149] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-96d438c3-e996-477f-a054-00bf72b07b8c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.374481] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6224311b-b084-43cd-b01b-d223a9b7a90b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.403619] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 86e3d197-2e8c-4357-ac0a-e1af8e247024 could not be found. [ 645.403851] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 645.404050] env[62585]: INFO nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Took 0.04 seconds to destroy the instance on the hypervisor. [ 645.404296] env[62585]: DEBUG oslo.service.loopingcall [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 645.404852] env[62585]: DEBUG nova.compute.manager [-] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 645.404950] env[62585]: DEBUG nova.network.neutron [-] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 645.423808] env[62585]: DEBUG nova.network.neutron [-] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 645.438809] env[62585]: DEBUG nova.compute.manager [req-ffd25ba9-cbde-46f2-a1f2-419d44ceeecc req-b6204824-bcd0-4dbe-9385-9544ea0b5869 service nova] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Received event network-changed-34e0888f-ed4b-4330-9155-39789e4652d1 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 645.439105] env[62585]: DEBUG nova.compute.manager [req-ffd25ba9-cbde-46f2-a1f2-419d44ceeecc req-b6204824-bcd0-4dbe-9385-9544ea0b5869 service nova] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Refreshing instance network info cache due to event network-changed-34e0888f-ed4b-4330-9155-39789e4652d1. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 645.442280] env[62585]: DEBUG oslo_concurrency.lockutils [req-ffd25ba9-cbde-46f2-a1f2-419d44ceeecc req-b6204824-bcd0-4dbe-9385-9544ea0b5869 service nova] Acquiring lock "refresh_cache-86e3d197-2e8c-4357-ac0a-e1af8e247024" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.442280] env[62585]: DEBUG oslo_concurrency.lockutils [req-ffd25ba9-cbde-46f2-a1f2-419d44ceeecc req-b6204824-bcd0-4dbe-9385-9544ea0b5869 service nova] Acquired lock "refresh_cache-86e3d197-2e8c-4357-ac0a-e1af8e247024" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.442280] env[62585]: DEBUG nova.network.neutron [req-ffd25ba9-cbde-46f2-a1f2-419d44ceeecc req-b6204824-bcd0-4dbe-9385-9544ea0b5869 service nova] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Refreshing network info cache for port 34e0888f-ed4b-4330-9155-39789e4652d1 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 645.551120] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76f3f93-bdcb-4462-8aa7-f26c56a95864 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.558576] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749df943-f41b-40b9-b78d-d52622fa4a97 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.590337] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d89317c3-9f8d-4497-9b6b-2208a0b0c488 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.598163] env[62585]: DEBUG nova.network.neutron [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Successfully created port: eebdbdfc-0983-4d19-a0f7-cda4fb115666 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 645.608344] env[62585]: INFO nova.compute.manager [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Took 26.15 seconds to build instance. 
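The Acquiring/Acquired/Releasing lock lines around "refresh_cache-<uuid>" and "compute_resources" are emitted by oslo.concurrency's lockutils. A minimal sketch of that named-lock pattern, with the lock names taken from the log and the guarded functions being made-up placeholders rather than Nova code:

```python
# Minimal sketch of the oslo.concurrency named-lock pattern that produces the
# 'Acquiring lock "refresh_cache-..."' / 'Lock ... "released"' lines above.
# refresh_network_cache and abort_claim are placeholders, not Nova functions.
from oslo_concurrency import lockutils


def refresh_network_cache(instance_uuid, fetch_nw_info):
    # Serializes cache refreshes per instance, matching the
    # "refresh_cache-<uuid>" lock names seen in the log.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return fetch_nw_info(instance_uuid)


@lockutils.synchronized('compute_resources')
def abort_claim(claims, instance_uuid):
    # Decorator form: every claim/abort on this host contends for the same
    # in-process lock, which is why long "waited 15.703s" entries appear.
    claims.pop(instance_uuid, None)
```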
[ 645.610191] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3d6720-b879-4ada-b044-be37c0e06468 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.624950] env[62585]: DEBUG nova.compute.provider_tree [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.638414] env[62585]: DEBUG nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 645.926729] env[62585]: DEBUG nova.network.neutron [-] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.961682] env[62585]: DEBUG nova.network.neutron [req-ffd25ba9-cbde-46f2-a1f2-419d44ceeecc req-b6204824-bcd0-4dbe-9385-9544ea0b5869 service nova] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 646.056402] env[62585]: DEBUG nova.network.neutron [req-ffd25ba9-cbde-46f2-a1f2-419d44ceeecc req-b6204824-bcd0-4dbe-9385-9544ea0b5869 service nova] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.116591] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7d45d568-3574-4b43-a466-410c63b96f8d tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "2cf0927d-8d98-4554-92ce-c049e1ea179c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.194s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.127931] env[62585]: DEBUG nova.scheduler.client.report [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 646.429169] env[62585]: INFO nova.compute.manager [-] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Took 1.02 seconds to deallocate network for instance. 
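For reference, the effective capacity Placement derives from the inventory reported above is (total - reserved) * allocation_ratio per resource class, with max_unit capping a single allocation. A small worked example using the numbers logged for this provider (illustrative only):

```python
# Worked example using the inventory reported above for provider
# 66db9ec1-b5c3-45d2-a885-8e338110656b: effective capacity in Placement is
# (total - reserved) * allocation_ratio, while max_unit caps one allocation.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 176},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    # VCPU: (48 - 0) * 4.0 = 192; MEMORY_MB: (196590 - 512) * 1.0 = 196078;
    # DISK_GB: (400 - 0) * 1.0 = 400
    print(f"{rc}: capacity={capacity:.0f}, per-allocation cap={inv['max_unit']}")
```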
[ 646.431390] env[62585]: DEBUG nova.compute.claims [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 646.431563] env[62585]: DEBUG oslo_concurrency.lockutils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.544986] env[62585]: ERROR nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port eebdbdfc-0983-4d19-a0f7-cda4fb115666, please check neutron logs for more information. [ 646.544986] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 646.544986] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 646.544986] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 646.544986] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 646.544986] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 646.544986] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 646.544986] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 646.544986] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 646.544986] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 646.544986] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 646.544986] env[62585]: ERROR nova.compute.manager raise self.value [ 646.544986] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 646.544986] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 646.544986] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 646.544986] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 646.545448] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 646.545448] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 646.545448] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port eebdbdfc-0983-4d19-a0f7-cda4fb115666, please check neutron logs for more information. 
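Both tracebacks around this point end in PortBindingFailed for port eebdbdfc-0983-4d19-a0f7-cda4fb115666. A simplified stand-in for the kind of check that raises it, not Nova's implementation, assuming a port dict shaped like Neutron's API response:

```python
# Simplified stand-in for the check that raises PortBindingFailed in the
# tracebacks above; not Nova's implementation. After a port update, the
# binding reported by Neutron is inspected and 'binding_failed' aborts spawn.

VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check neutron '
                         'logs for more information.' % port_id)


def ensure_no_port_binding_failure(port):
    # 'binding:vif_type' comes from Neutron's ML2 port binding; a value of
    # 'binding_failed' typically means no mechanism driver could bind the
    # port for the requested host.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port['id'])


# Example port shaped like the one in the tracebacks above:
try:
    ensure_no_port_binding_failure(
        {'id': 'eebdbdfc-0983-4d19-a0f7-cda4fb115666',
         'binding:vif_type': VIF_TYPE_BINDING_FAILED})
except PortBindingFailed as exc:
    print(exc)
```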
[ 646.545448] env[62585]: ERROR nova.compute.manager [ 646.545448] env[62585]: Traceback (most recent call last): [ 646.545448] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 646.545448] env[62585]: listener.cb(fileno) [ 646.545448] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 646.545448] env[62585]: result = function(*args, **kwargs) [ 646.545448] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 646.545448] env[62585]: return func(*args, **kwargs) [ 646.545448] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 646.545448] env[62585]: raise e [ 646.545448] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 646.545448] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 646.545448] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 646.545448] env[62585]: created_port_ids = self._update_ports_for_instance( [ 646.545448] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 646.545448] env[62585]: with excutils.save_and_reraise_exception(): [ 646.545448] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 646.545448] env[62585]: self.force_reraise() [ 646.545448] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 646.545448] env[62585]: raise self.value [ 646.545448] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 646.545448] env[62585]: updated_port = self._update_port( [ 646.545448] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 646.545448] env[62585]: _ensure_no_port_binding_failure(port) [ 646.545448] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 646.545448] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 646.546286] env[62585]: nova.exception.PortBindingFailed: Binding failed for port eebdbdfc-0983-4d19-a0f7-cda4fb115666, please check neutron logs for more information. [ 646.546286] env[62585]: Removing descriptor: 17 [ 646.559636] env[62585]: DEBUG oslo_concurrency.lockutils [req-ffd25ba9-cbde-46f2-a1f2-419d44ceeecc req-b6204824-bcd0-4dbe-9385-9544ea0b5869 service nova] Releasing lock "refresh_cache-86e3d197-2e8c-4357-ac0a-e1af8e247024" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.559874] env[62585]: DEBUG nova.compute.manager [req-ffd25ba9-cbde-46f2-a1f2-419d44ceeecc req-b6204824-bcd0-4dbe-9385-9544ea0b5869 service nova] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Received event network-vif-deleted-34e0888f-ed4b-4330-9155-39789e4652d1 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 646.619958] env[62585]: DEBUG nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 646.633550] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.007s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.634285] env[62585]: ERROR nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b4dcd767-35e9-4212-8e81-d4dac543de77, please check neutron logs for more information. [ 646.634285] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Traceback (most recent call last): [ 646.634285] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 646.634285] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] self.driver.spawn(context, instance, image_meta, [ 646.634285] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 646.634285] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 646.634285] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 646.634285] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] vm_ref = self.build_virtual_machine(instance, [ 646.634285] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 646.634285] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] vif_infos = vmwarevif.get_vif_info(self._session, [ 646.634285] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 646.635246] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] for vif in network_info: [ 646.635246] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 646.635246] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] return self._sync_wrapper(fn, *args, **kwargs) [ 646.635246] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 646.635246] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] self.wait() [ 646.635246] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 
646.635246] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] self[:] = self._gt.wait() [ 646.635246] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 646.635246] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] return self._exit_event.wait() [ 646.635246] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 646.635246] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] result = hub.switch() [ 646.635246] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 646.635246] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] return self.greenlet.switch() [ 646.635607] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 646.635607] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] result = function(*args, **kwargs) [ 646.635607] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 646.635607] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] return func(*args, **kwargs) [ 646.635607] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 646.635607] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] raise e [ 646.635607] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 646.635607] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] nwinfo = self.network_api.allocate_for_instance( [ 646.635607] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 646.635607] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] created_port_ids = self._update_ports_for_instance( [ 646.635607] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 646.635607] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] with excutils.save_and_reraise_exception(): [ 646.635607] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 646.635918] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] self.force_reraise() [ 646.635918] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 646.635918] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] raise self.value [ 646.635918] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 646.635918] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] updated_port = self._update_port( [ 646.635918] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 646.635918] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] _ensure_no_port_binding_failure(port) [ 646.635918] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 646.635918] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] raise exception.PortBindingFailed(port_id=port['id']) [ 646.635918] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] nova.exception.PortBindingFailed: Binding failed for port b4dcd767-35e9-4212-8e81-d4dac543de77, please check neutron logs for more information. [ 646.635918] env[62585]: ERROR nova.compute.manager [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] [ 646.636340] env[62585]: DEBUG nova.compute.utils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Binding failed for port b4dcd767-35e9-4212-8e81-d4dac543de77, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 646.636379] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.976s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.638097] env[62585]: DEBUG nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Build of instance e883b58a-0fa6-48fd-a8a7-24ead857e6f1 was re-scheduled: Binding failed for port b4dcd767-35e9-4212-8e81-d4dac543de77, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 646.638507] env[62585]: DEBUG nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 646.638730] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Acquiring lock "refresh_cache-e883b58a-0fa6-48fd-a8a7-24ead857e6f1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.638876] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Acquired lock "refresh_cache-e883b58a-0fa6-48fd-a8a7-24ead857e6f1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.639044] env[62585]: DEBUG nova.network.neutron [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 646.648010] env[62585]: DEBUG nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 646.669986] env[62585]: DEBUG nova.virt.hardware [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 646.670251] env[62585]: DEBUG nova.virt.hardware [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 646.670403] env[62585]: DEBUG nova.virt.hardware [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 646.670580] env[62585]: DEBUG nova.virt.hardware [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 646.670723] env[62585]: DEBUG nova.virt.hardware [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 646.670866] env[62585]: DEBUG nova.virt.hardware [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 646.671097] env[62585]: DEBUG nova.virt.hardware [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 646.672035] env[62585]: DEBUG nova.virt.hardware [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 646.672035] env[62585]: DEBUG nova.virt.hardware [None req-02538a76-a426-4d85-a8f0-cba96aa06288 
tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 646.672035] env[62585]: DEBUG nova.virt.hardware [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 646.672035] env[62585]: DEBUG nova.virt.hardware [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 646.673614] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a262007e-0dd3-4970-a739-8a83c54ce657 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.682143] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd8c1ad-af5a-481c-9942-e08614a1f7b1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.698295] env[62585]: ERROR nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port eebdbdfc-0983-4d19-a0f7-cda4fb115666, please check neutron logs for more information. 
[ 646.698295] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Traceback (most recent call last): [ 646.698295] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 646.698295] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] yield resources [ 646.698295] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 646.698295] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] self.driver.spawn(context, instance, image_meta, [ 646.698295] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 646.698295] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 646.698295] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 646.698295] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] vm_ref = self.build_virtual_machine(instance, [ 646.698295] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 646.698735] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] vif_infos = vmwarevif.get_vif_info(self._session, [ 646.698735] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 646.698735] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] for vif in network_info: [ 646.698735] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 646.698735] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] return self._sync_wrapper(fn, *args, **kwargs) [ 646.698735] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 646.698735] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] self.wait() [ 646.698735] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 646.698735] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] self[:] = self._gt.wait() [ 646.698735] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 646.698735] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] return self._exit_event.wait() [ 646.698735] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 646.698735] env[62585]: ERROR 
nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] current.throw(*self._exc) [ 646.699058] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 646.699058] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] result = function(*args, **kwargs) [ 646.699058] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 646.699058] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] return func(*args, **kwargs) [ 646.699058] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 646.699058] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] raise e [ 646.699058] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 646.699058] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] nwinfo = self.network_api.allocate_for_instance( [ 646.699058] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 646.699058] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] created_port_ids = self._update_ports_for_instance( [ 646.699058] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 646.699058] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] with excutils.save_and_reraise_exception(): [ 646.699058] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 646.699610] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] self.force_reraise() [ 646.699610] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 646.699610] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] raise self.value [ 646.699610] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 646.699610] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] updated_port = self._update_port( [ 646.699610] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 646.699610] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] _ensure_no_port_binding_failure(port) [ 646.699610] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
646.699610] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] raise exception.PortBindingFailed(port_id=port['id']) [ 646.699610] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] nova.exception.PortBindingFailed: Binding failed for port eebdbdfc-0983-4d19-a0f7-cda4fb115666, please check neutron logs for more information. [ 646.699610] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] [ 646.699610] env[62585]: INFO nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Terminating instance [ 646.699943] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Acquiring lock "refresh_cache-aae9ff25-f304-4dbe-824c-b17b3522655c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.700115] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Acquired lock "refresh_cache-aae9ff25-f304-4dbe-824c-b17b3522655c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.700404] env[62585]: DEBUG nova.network.neutron [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 646.906631] env[62585]: INFO nova.compute.manager [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Rebuilding instance [ 646.947287] env[62585]: DEBUG nova.compute.manager [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 646.948128] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222771c6-d462-4197-811f-234cb43312e5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.147016] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.160216] env[62585]: DEBUG nova.network.neutron [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.225658] env[62585]: DEBUG nova.network.neutron [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.248812] env[62585]: DEBUG nova.network.neutron [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.279988] env[62585]: DEBUG nova.network.neutron [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.458227] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 647.458592] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d662a84-6537-4fd4-9a65-e8df814ed627 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.463629] env[62585]: DEBUG nova.compute.manager [req-3b97f61f-774a-47be-91fb-b47a9960333a req-9202fb81-224b-4d15-bdb4-ec6ab0067bb6 service nova] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Received event network-changed-eebdbdfc-0983-4d19-a0f7-cda4fb115666 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 647.463777] env[62585]: DEBUG nova.compute.manager [req-3b97f61f-774a-47be-91fb-b47a9960333a req-9202fb81-224b-4d15-bdb4-ec6ab0067bb6 service nova] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Refreshing instance network info cache due to event network-changed-eebdbdfc-0983-4d19-a0f7-cda4fb115666. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 647.464050] env[62585]: DEBUG oslo_concurrency.lockutils [req-3b97f61f-774a-47be-91fb-b47a9960333a req-9202fb81-224b-4d15-bdb4-ec6ab0067bb6 service nova] Acquiring lock "refresh_cache-aae9ff25-f304-4dbe-824c-b17b3522655c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.466501] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 647.466501] env[62585]: value = "task-1384677" [ 647.466501] env[62585]: _type = "Task" [ 647.466501] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.474903] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384677, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.751817] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Releasing lock "refresh_cache-e883b58a-0fa6-48fd-a8a7-24ead857e6f1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.752097] env[62585]: DEBUG nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 647.752294] env[62585]: DEBUG nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 647.752461] env[62585]: DEBUG nova.network.neutron [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 647.767071] env[62585]: DEBUG nova.network.neutron [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.785729] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Releasing lock "refresh_cache-aae9ff25-f304-4dbe-824c-b17b3522655c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.786206] env[62585]: DEBUG nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 647.786402] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 647.786697] env[62585]: DEBUG oslo_concurrency.lockutils [req-3b97f61f-774a-47be-91fb-b47a9960333a req-9202fb81-224b-4d15-bdb4-ec6ab0067bb6 service nova] Acquired lock "refresh_cache-aae9ff25-f304-4dbe-824c-b17b3522655c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.786866] env[62585]: DEBUG nova.network.neutron [req-3b97f61f-774a-47be-91fb-b47a9960333a req-9202fb81-224b-4d15-bdb4-ec6ab0067bb6 service nova] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Refreshing network info cache for port eebdbdfc-0983-4d19-a0f7-cda4fb115666 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 647.788092] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-52e4d63d-4803-477e-95e6-b17be0f4c9d6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.797895] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d28622-0962-45d7-a434-737788c785f1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.823661] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aae9ff25-f304-4dbe-824c-b17b3522655c could not be found. [ 647.823900] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 647.824093] env[62585]: INFO nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 647.824342] env[62585]: DEBUG oslo.service.loopingcall [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 647.824816] env[62585]: DEBUG nova.compute.manager [-] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 647.824915] env[62585]: DEBUG nova.network.neutron [-] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 647.841911] env[62585]: DEBUG nova.network.neutron [-] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.977253] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384677, 'name': PowerOffVM_Task, 'duration_secs': 0.148847} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.977598] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 647.977823] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 647.978588] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2adda8d6-affb-48a4-b080-c123188be00c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.985686] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 647.985898] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d62f21b1-b10a-4f97-a9c5-c9937653a345 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.011906] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 648.011906] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 648.011906] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Deleting the datastore file [datastore1] 2cf0927d-8d98-4554-92ce-c049e1ea179c {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 648.011906] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ded1d7c-57e0-4a50-ae0f-1a539a2cb3f7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.018598] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 648.018598] env[62585]: value = "task-1384679" [ 648.018598] env[62585]: _type = "Task" [ 648.018598] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.026277] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384679, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.169982] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance e883b58a-0fa6-48fd-a8a7-24ead857e6f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.170182] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 5cd813d1-f778-4c8a-920b-64e92a3b52af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.170309] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance c080105d-4a58-4616-b65c-7bac79dd93c1 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.170429] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 14557f1a-2410-4201-9b91-49d23f18d47a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.170545] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 16f01d66-44f8-4912-989a-48c39f667c95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.170657] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 2cf0927d-8d98-4554-92ce-c049e1ea179c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.170768] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 3695a09f-dffc-4537-ac78-faffd6bdd252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.170879] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 86e3d197-2e8c-4357-ac0a-e1af8e247024 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.170989] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance aae9ff25-f304-4dbe-824c-b17b3522655c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.269938] env[62585]: DEBUG nova.network.neutron [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.304577] env[62585]: DEBUG nova.network.neutron [req-3b97f61f-774a-47be-91fb-b47a9960333a req-9202fb81-224b-4d15-bdb4-ec6ab0067bb6 service nova] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.344088] env[62585]: DEBUG nova.network.neutron [-] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.506530] env[62585]: DEBUG nova.network.neutron [req-3b97f61f-774a-47be-91fb-b47a9960333a req-9202fb81-224b-4d15-bdb4-ec6ab0067bb6 service nova] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.527550] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384679, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086684} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.527687] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 648.529550] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 648.529550] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 648.675049] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 3b50dbde-2969-4a4b-ae35-42416342a60b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.772473] env[62585]: INFO nova.compute.manager [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] [instance: e883b58a-0fa6-48fd-a8a7-24ead857e6f1] Took 1.02 seconds to deallocate network for instance. [ 648.846792] env[62585]: INFO nova.compute.manager [-] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Took 1.02 seconds to deallocate network for instance. 
[ 648.848944] env[62585]: DEBUG nova.compute.claims [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 648.849135] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.009975] env[62585]: DEBUG oslo_concurrency.lockutils [req-3b97f61f-774a-47be-91fb-b47a9960333a req-9202fb81-224b-4d15-bdb4-ec6ab0067bb6 service nova] Releasing lock "refresh_cache-aae9ff25-f304-4dbe-824c-b17b3522655c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 649.010306] env[62585]: DEBUG nova.compute.manager [req-3b97f61f-774a-47be-91fb-b47a9960333a req-9202fb81-224b-4d15-bdb4-ec6ab0067bb6 service nova] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Received event network-vif-deleted-eebdbdfc-0983-4d19-a0f7-cda4fb115666 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 649.177779] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 1cad8d1b-ed02-424c-879c-2f23d4d90b22 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 649.563123] env[62585]: DEBUG nova.virt.hardware [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 649.563394] env[62585]: DEBUG nova.virt.hardware [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 649.563531] env[62585]: DEBUG nova.virt.hardware [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.563709] env[62585]: DEBUG nova.virt.hardware [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 649.563852] env[62585]: DEBUG nova.virt.hardware [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 649.563995] env[62585]: DEBUG nova.virt.hardware [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 649.564222] env[62585]: DEBUG nova.virt.hardware [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 649.564379] env[62585]: DEBUG nova.virt.hardware [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 649.564562] env[62585]: DEBUG nova.virt.hardware [None req-d89c931f-5dda-4283-8b87-18cd46753b14 
tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 649.564698] env[62585]: DEBUG nova.virt.hardware [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 649.564865] env[62585]: DEBUG nova.virt.hardware [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 649.565722] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b81c4da-311f-4ec7-8157-10854b494004 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.574387] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7be1c3-2b64-48ce-8f74-16bacd05b6ed {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.588766] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Instance VIF info [] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 649.594147] env[62585]: DEBUG oslo.service.loopingcall [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 649.594389] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 649.594590] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac692e20-18d6-46b5-81c6-e61616806151 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.611557] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 649.611557] env[62585]: value = "task-1384680" [ 649.611557] env[62585]: _type = "Task" [ 649.611557] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.619586] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384680, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.680656] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance ed0ec962-3c4e-409f-9332-0a79ca1c6ed3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 649.804403] env[62585]: INFO nova.scheduler.client.report [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Deleted allocations for instance e883b58a-0fa6-48fd-a8a7-24ead857e6f1 [ 650.122470] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384680, 'name': CreateVM_Task, 'duration_secs': 0.228455} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.122655] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 650.123091] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.123261] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.123570] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 650.123816] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e54ca4ce-0385-4c69-8103-e887e0cc2380 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.128165] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 650.128165] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e6a194-ed37-67b0-6743-70762a882299" [ 650.128165] env[62585]: _type = "Task" [ 650.128165] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.135530] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e6a194-ed37-67b0-6743-70762a882299, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.187670] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance ae66c3e2-eac8-4239-b5be-64dc0dcf2c04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.313089] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b582aff8-01dd-43d5-98c1-479680f34533 tempest-ImagesOneServerNegativeTestJSON-582047677 tempest-ImagesOneServerNegativeTestJSON-582047677-project-member] Lock "e883b58a-0fa6-48fd-a8a7-24ead857e6f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.901s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.638922] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e6a194-ed37-67b0-6743-70762a882299, 'name': SearchDatastore_Task, 'duration_secs': 0.008376} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.639231] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.639462] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 650.639693] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.641240] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.641240] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
650.641240] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e0dd85d-9290-452d-ab57-c7a6e3a63b6d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.648657] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 650.648836] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 650.649561] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee2749bd-e38d-4ac1-a2ea-a44deb225ad7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.654500] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 650.654500] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]523faf8f-89a3-90ec-d93e-b31aeac7194a" [ 650.654500] env[62585]: _type = "Task" [ 650.654500] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.664522] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]523faf8f-89a3-90ec-d93e-b31aeac7194a, 'name': SearchDatastore_Task, 'duration_secs': 0.007699} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.665216] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91ec193c-e636-4bfa-90d3-d3a3599ea22e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.670117] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 650.670117] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52593021-43b5-6b70-d97f-8663ccea9fa3" [ 650.670117] env[62585]: _type = "Task" [ 650.670117] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.678309] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52593021-43b5-6b70-d97f-8663ccea9fa3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.691518] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 0b4d919f-552e-489e-bcfb-f6447cf81fb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.816432] env[62585]: DEBUG nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 651.181683] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52593021-43b5-6b70-d97f-8663ccea9fa3, 'name': SearchDatastore_Task, 'duration_secs': 0.00768} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.181954] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.182228] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 2cf0927d-8d98-4554-92ce-c049e1ea179c/2cf0927d-8d98-4554-92ce-c049e1ea179c.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 651.182480] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe92c9d2-5d6d-48e8-b110-d7aa1cfb0f48 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.189667] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 651.189667] env[62585]: value = "task-1384681" [ 651.189667] env[62585]: _type = "Task" [ 651.189667] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.193560] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 4557a853-232e-49e5-9052-ebf54d68e998 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 651.200663] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384681, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.344195] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.696351] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 72cdccb7-b398-4833-af82-d64222c83f8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 651.701261] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384681, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452795} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.701781] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 2cf0927d-8d98-4554-92ce-c049e1ea179c/2cf0927d-8d98-4554-92ce-c049e1ea179c.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 651.702058] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 651.702350] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-243d9025-bd65-44eb-88d9-cc19b9f66b49 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.709664] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 651.709664] env[62585]: value = "task-1384682" [ 651.709664] env[62585]: _type = "Task" [ 651.709664] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.718162] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384682, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.203631] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 53e10c33-0f41-48a2-ac19-c0b34a9a9312 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.220414] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384682, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066389} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.221414] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 652.223131] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed43a6bd-d3fb-455d-91c9-87b04b56cadf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.244781] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 2cf0927d-8d98-4554-92ce-c049e1ea179c/2cf0927d-8d98-4554-92ce-c049e1ea179c.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 652.245312] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eedc4d1a-105b-4477-855b-857e2d84d9dd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.265279] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 652.265279] env[62585]: value = "task-1384683" [ 652.265279] env[62585]: _type = "Task" [ 652.265279] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.274149] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384683, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.706591] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 424fc272-b4b9-4867-a083-b27abe308f81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.779143] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384683, 'name': ReconfigVM_Task, 'duration_secs': 0.309265} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.779526] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 2cf0927d-8d98-4554-92ce-c049e1ea179c/2cf0927d-8d98-4554-92ce-c049e1ea179c.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 652.781030] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dcb2bd6c-9d97-42b5-9ae0-aecf60d10305 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.787219] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 652.787219] env[62585]: value = "task-1384684" [ 652.787219] env[62585]: _type = "Task" [ 652.787219] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.797566] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384684, 'name': Rename_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.212662] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 0049c4a4-dfc2-4968-8ab1-61c344f32e6d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 653.298725] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384684, 'name': Rename_Task, 'duration_secs': 0.160726} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.298986] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 653.299234] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-40ff670b-d9b5-4757-b543-7051c44d9630 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.306222] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 653.306222] env[62585]: value = "task-1384685" [ 653.306222] env[62585]: _type = "Task" [ 653.306222] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.317281] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384685, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.716397] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 653.817983] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384685, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.219718] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 2fccf900-e294-4d66-93c5-d1c7570c5d7e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 654.319733] env[62585]: DEBUG oslo_vmware.api [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384685, 'name': PowerOnVM_Task, 'duration_secs': 0.757292} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.320067] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 654.320319] env[62585]: DEBUG nova.compute.manager [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 654.321250] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb97bb2-23a6-43ce-b9e4-8499317a3141 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.724421] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 7504c221-2d27-4dc6-9100-9a2dca2a6036 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 654.839688] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.228372] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance a0512ab3-1248-4f38-8ed9-249ba5a2d488 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 655.518478] env[62585]: DEBUG oslo_concurrency.lockutils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "0582ee18-80b2-48d0-9d8d-e82d1b9d60c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.518708] env[62585]: DEBUG oslo_concurrency.lockutils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "0582ee18-80b2-48d0-9d8d-e82d1b9d60c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.734199] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance d536e668-d597-4f8e-8d61-974e072b48c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 655.891241] env[62585]: DEBUG oslo_concurrency.lockutils [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "2cf0927d-8d98-4554-92ce-c049e1ea179c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.891528] env[62585]: DEBUG oslo_concurrency.lockutils [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "2cf0927d-8d98-4554-92ce-c049e1ea179c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.891753] env[62585]: DEBUG oslo_concurrency.lockutils [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "2cf0927d-8d98-4554-92ce-c049e1ea179c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.891934] env[62585]: DEBUG oslo_concurrency.lockutils [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "2cf0927d-8d98-4554-92ce-c049e1ea179c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.892113] env[62585]: DEBUG oslo_concurrency.lockutils [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock 
"2cf0927d-8d98-4554-92ce-c049e1ea179c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.894486] env[62585]: INFO nova.compute.manager [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Terminating instance [ 655.896215] env[62585]: DEBUG oslo_concurrency.lockutils [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "refresh_cache-2cf0927d-8d98-4554-92ce-c049e1ea179c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.896431] env[62585]: DEBUG oslo_concurrency.lockutils [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquired lock "refresh_cache-2cf0927d-8d98-4554-92ce-c049e1ea179c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.896533] env[62585]: DEBUG nova.network.neutron [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 656.238298] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance f03bdd4b-e75e-4d70-84b3-126d2296994f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 656.418074] env[62585]: DEBUG nova.network.neutron [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 656.485694] env[62585]: DEBUG nova.network.neutron [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.741783] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 656.741783] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 656.741783] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 656.986776] env[62585]: DEBUG oslo_concurrency.lockutils [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Releasing lock "refresh_cache-2cf0927d-8d98-4554-92ce-c049e1ea179c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.987250] env[62585]: DEBUG nova.compute.manager [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 656.987456] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 656.988321] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0edb4eab-5373-4b09-9e23-16f34baa5394 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.995721] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 656.998198] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1cc13e8f-3ced-4bd6-9655-16ee1b990888 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.004524] env[62585]: DEBUG oslo_vmware.api [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 657.004524] env[62585]: value = "task-1384686" [ 657.004524] env[62585]: _type = "Task" [ 657.004524] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.016681] env[62585]: DEBUG oslo_vmware.api [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384686, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.032226] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-991427bb-fd20-438d-8ec1-f8253618a3f3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.039450] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416bdc74-9e7a-44d3-81e2-2e047c415a78 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.069150] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e749d70b-0dbf-45f2-90c2-93d9d3994161 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.076650] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0249c9cd-6592-45ea-98b6-34d2ec490e6d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.090300] env[62585]: DEBUG nova.compute.provider_tree [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.514282] env[62585]: DEBUG oslo_vmware.api [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384686, 'name': PowerOffVM_Task, 'duration_secs': 0.120693} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.514527] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 657.514689] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 657.514932] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-866f1a47-1be6-4800-a868-fcaf8e06a370 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.543897] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 657.544207] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 657.544422] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Deleting the datastore file [datastore1] 2cf0927d-8d98-4554-92ce-c049e1ea179c {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 657.544688] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fce83568-d23c-401b-b72e-a4a791920876 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.551062] env[62585]: DEBUG oslo_vmware.api [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 657.551062] env[62585]: value = "task-1384688" [ 657.551062] env[62585]: _type = "Task" [ 657.551062] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.559565] env[62585]: DEBUG oslo_vmware.api [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384688, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.593244] env[62585]: DEBUG nova.scheduler.client.report [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 658.061681] env[62585]: DEBUG oslo_vmware.api [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384688, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096382} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.061947] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 658.062146] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 658.062322] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 658.062495] env[62585]: INFO nova.compute.manager [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Took 1.08 seconds to destroy the instance on the hypervisor. [ 658.062732] env[62585]: DEBUG oslo.service.loopingcall [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 658.062922] env[62585]: DEBUG nova.compute.manager [-] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 658.063027] env[62585]: DEBUG nova.network.neutron [-] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 658.079210] env[62585]: DEBUG nova.network.neutron [-] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.098015] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62585) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 658.098253] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 11.462s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 658.098663] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.875s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.582461] env[62585]: DEBUG nova.network.neutron [-] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.894492] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b438b97-aa91-4051-9e52-932fb3e3b4fb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.901756] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff1891a-46e5-471b-9e90-fe6cd0145b8d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.930908] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed945caa-aa03-4cb1-ad68-5f288bce62cf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.938047] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382db6e5-cd58-4509-a0fa-9ad9472aeb8b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.950853] env[62585]: DEBUG nova.compute.provider_tree [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.087096] env[62585]: INFO nova.compute.manager [-] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Took 1.02 seconds to deallocate network for instance. 
[ 659.454551] env[62585]: DEBUG nova.scheduler.client.report [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 659.594402] env[62585]: DEBUG oslo_concurrency.lockutils [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.959518] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.861s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.960182] env[62585]: ERROR nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4279ede4-346d-4574-b592-11021eb73bce, please check neutron logs for more information. 
[ 659.960182] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Traceback (most recent call last): [ 659.960182] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 659.960182] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] self.driver.spawn(context, instance, image_meta, [ 659.960182] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 659.960182] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] self._vmops.spawn(context, instance, image_meta, injected_files, [ 659.960182] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 659.960182] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] vm_ref = self.build_virtual_machine(instance, [ 659.960182] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 659.960182] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] vif_infos = vmwarevif.get_vif_info(self._session, [ 659.960182] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 659.960615] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] for vif in network_info: [ 659.960615] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 659.960615] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] return self._sync_wrapper(fn, *args, **kwargs) [ 659.960615] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 659.960615] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] self.wait() [ 659.960615] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 659.960615] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] self[:] = self._gt.wait() [ 659.960615] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 659.960615] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] return self._exit_event.wait() [ 659.960615] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 659.960615] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] result = hub.switch() [ 659.960615] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
659.960615] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] return self.greenlet.switch() [ 659.960957] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 659.960957] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] result = function(*args, **kwargs) [ 659.960957] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 659.960957] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] return func(*args, **kwargs) [ 659.960957] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 659.960957] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] raise e [ 659.960957] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 659.960957] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] nwinfo = self.network_api.allocate_for_instance( [ 659.960957] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 659.960957] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] created_port_ids = self._update_ports_for_instance( [ 659.960957] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 659.960957] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] with excutils.save_and_reraise_exception(): [ 659.960957] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.961384] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] self.force_reraise() [ 659.961384] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.961384] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] raise self.value [ 659.961384] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 659.961384] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] updated_port = self._update_port( [ 659.961384] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.961384] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] _ensure_no_port_binding_failure(port) [ 659.961384] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 659.961384] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] raise exception.PortBindingFailed(port_id=port['id']) [ 659.961384] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] nova.exception.PortBindingFailed: Binding failed for port 4279ede4-346d-4574-b592-11021eb73bce, please check neutron logs for more information. [ 659.961384] env[62585]: ERROR nova.compute.manager [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] [ 659.961670] env[62585]: DEBUG nova.compute.utils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Binding failed for port 4279ede4-346d-4574-b592-11021eb73bce, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 659.962560] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.260s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.966219] env[62585]: DEBUG nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Build of instance 5cd813d1-f778-4c8a-920b-64e92a3b52af was re-scheduled: Binding failed for port 4279ede4-346d-4574-b592-11021eb73bce, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 659.966638] env[62585]: DEBUG nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 659.966971] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Acquiring lock "refresh_cache-5cd813d1-f778-4c8a-920b-64e92a3b52af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.967168] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Acquired lock "refresh_cache-5cd813d1-f778-4c8a-920b-64e92a3b52af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.967347] env[62585]: DEBUG nova.network.neutron [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 660.491602] env[62585]: DEBUG nova.network.neutron [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 660.605484] env[62585]: DEBUG nova.network.neutron [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.833548] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f86aff-88ff-4a74-a730-caa3e6b4a095 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.843472] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182437fb-d4d1-4cca-a0c1-ba2e9ccf9057 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.874843] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0220bf90-7601-4972-b0ae-14cdf61b9df1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.884429] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f28f26-3cd8-4184-8657-e3fe48ad1dc5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.897955] env[62585]: DEBUG nova.compute.provider_tree [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.111673] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Releasing lock "refresh_cache-5cd813d1-f778-4c8a-920b-64e92a3b52af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.111904] env[62585]: DEBUG nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 661.112101] env[62585]: DEBUG nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 661.112274] env[62585]: DEBUG nova.network.neutron [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 661.130095] env[62585]: DEBUG nova.network.neutron [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 661.401434] env[62585]: DEBUG nova.scheduler.client.report [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 661.633500] env[62585]: DEBUG nova.network.neutron [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.908351] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.946s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.909740] env[62585]: ERROR nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3940fa4a-2718-4628-ad67-601bc1ecbabc, please check neutron logs for more information. 
[ 661.909740] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Traceback (most recent call last): [ 661.909740] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 661.909740] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] self.driver.spawn(context, instance, image_meta, [ 661.909740] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 661.909740] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 661.909740] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 661.909740] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] vm_ref = self.build_virtual_machine(instance, [ 661.909740] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 661.909740] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] vif_infos = vmwarevif.get_vif_info(self._session, [ 661.909740] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 661.910088] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] for vif in network_info: [ 661.910088] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 661.910088] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] return self._sync_wrapper(fn, *args, **kwargs) [ 661.910088] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 661.910088] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] self.wait() [ 661.910088] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 661.910088] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] self[:] = self._gt.wait() [ 661.910088] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 661.910088] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] return self._exit_event.wait() [ 661.910088] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 661.910088] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] current.throw(*self._exc) [ 661.910088] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
661.910088] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] result = function(*args, **kwargs) [ 661.910393] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 661.910393] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] return func(*args, **kwargs) [ 661.910393] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 661.910393] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] raise e [ 661.910393] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 661.910393] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] nwinfo = self.network_api.allocate_for_instance( [ 661.910393] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 661.910393] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] created_port_ids = self._update_ports_for_instance( [ 661.910393] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 661.910393] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] with excutils.save_and_reraise_exception(): [ 661.910393] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 661.910393] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] self.force_reraise() [ 661.910393] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 661.910757] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] raise self.value [ 661.910757] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 661.910757] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] updated_port = self._update_port( [ 661.910757] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 661.910757] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] _ensure_no_port_binding_failure(port) [ 661.910757] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 661.910757] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] raise exception.PortBindingFailed(port_id=port['id']) [ 661.910757] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] nova.exception.PortBindingFailed: Binding failed for 
port 3940fa4a-2718-4628-ad67-601bc1ecbabc, please check neutron logs for more information. [ 661.910757] env[62585]: ERROR nova.compute.manager [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] [ 661.910757] env[62585]: DEBUG nova.compute.utils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Binding failed for port 3940fa4a-2718-4628-ad67-601bc1ecbabc, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 661.912253] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.062s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.915693] env[62585]: DEBUG nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Build of instance c080105d-4a58-4616-b65c-7bac79dd93c1 was re-scheduled: Binding failed for port 3940fa4a-2718-4628-ad67-601bc1ecbabc, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 661.916229] env[62585]: DEBUG nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 661.916486] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Acquiring lock "refresh_cache-c080105d-4a58-4616-b65c-7bac79dd93c1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.916657] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Acquired lock "refresh_cache-c080105d-4a58-4616-b65c-7bac79dd93c1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.916835] env[62585]: DEBUG nova.network.neutron [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 662.135778] env[62585]: INFO nova.compute.manager [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] [instance: 5cd813d1-f778-4c8a-920b-64e92a3b52af] Took 1.02 seconds to deallocate network for instance. 
[ 662.442669] env[62585]: DEBUG nova.network.neutron [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.526331] env[62585]: DEBUG nova.network.neutron [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.742105] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9348f4-3a5e-4b3c-86ca-32fd0e28aad9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.750010] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e262bd0f-df17-4cff-9bc4-3fcec826beaf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.778623] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b263469e-ba5c-40f5-b502-b1019d323bb2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.785283] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-471f125d-80b3-40e3-bfd2-9fd640ca52d9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.798063] env[62585]: DEBUG nova.compute.provider_tree [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.029084] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Releasing lock "refresh_cache-c080105d-4a58-4616-b65c-7bac79dd93c1" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.029084] env[62585]: DEBUG nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 663.029084] env[62585]: DEBUG nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 663.029279] env[62585]: DEBUG nova.network.neutron [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 663.044584] env[62585]: DEBUG nova.network.neutron [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.168735] env[62585]: INFO nova.scheduler.client.report [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Deleted allocations for instance 5cd813d1-f778-4c8a-920b-64e92a3b52af [ 663.302098] env[62585]: DEBUG nova.scheduler.client.report [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 663.547513] env[62585]: DEBUG nova.network.neutron [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.677757] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f71dd8b-a833-426d-a656-bfa28bb94c5a tempest-ServerActionsTestOtherB-1300611293 tempest-ServerActionsTestOtherB-1300611293-project-member] Lock "5cd813d1-f778-4c8a-920b-64e92a3b52af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 116.876s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.807354] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.896s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.809366] env[62585]: ERROR nova.compute.manager [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c69c9a27-7311-4a48-a33e-d1b8f1a78b15, please check neutron logs for more information. [ 663.809366] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Traceback (most recent call last): [ 663.809366] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 663.809366] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] self.driver.spawn(context, instance, image_meta, [ 663.809366] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 663.809366] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 663.809366] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 663.809366] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] vm_ref = self.build_virtual_machine(instance, [ 663.809366] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 663.809366] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] vif_infos = vmwarevif.get_vif_info(self._session, [ 663.809366] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 663.809756] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] for vif in network_info: [ 663.809756] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 663.809756] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] return self._sync_wrapper(fn, *args, **kwargs) [ 663.809756] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 663.809756] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] self.wait() [ 663.809756] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 663.809756] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] self[:] = self._gt.wait() [ 663.809756] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 663.809756] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] return self._exit_event.wait() [ 663.809756] 
env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 663.809756] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] current.throw(*self._exc) [ 663.809756] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 663.809756] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] result = function(*args, **kwargs) [ 663.810154] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 663.810154] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] return func(*args, **kwargs) [ 663.810154] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 663.810154] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] raise e [ 663.810154] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 663.810154] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] nwinfo = self.network_api.allocate_for_instance( [ 663.810154] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 663.810154] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] created_port_ids = self._update_ports_for_instance( [ 663.810154] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 663.810154] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] with excutils.save_and_reraise_exception(): [ 663.810154] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 663.810154] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] self.force_reraise() [ 663.810154] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 663.810567] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] raise self.value [ 663.810567] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 663.810567] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] updated_port = self._update_port( [ 663.810567] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 663.810567] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] 
_ensure_no_port_binding_failure(port) [ 663.810567] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 663.810567] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] raise exception.PortBindingFailed(port_id=port['id']) [ 663.810567] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] nova.exception.PortBindingFailed: Binding failed for port c69c9a27-7311-4a48-a33e-d1b8f1a78b15, please check neutron logs for more information. [ 663.810567] env[62585]: ERROR nova.compute.manager [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] [ 663.810567] env[62585]: DEBUG nova.compute.utils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Binding failed for port c69c9a27-7311-4a48-a33e-d1b8f1a78b15, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 663.810861] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.619s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.812198] env[62585]: INFO nova.compute.claims [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 663.816560] env[62585]: DEBUG nova.compute.manager [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Build of instance 14557f1a-2410-4201-9b91-49d23f18d47a was re-scheduled: Binding failed for port c69c9a27-7311-4a48-a33e-d1b8f1a78b15, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 663.816560] env[62585]: DEBUG nova.compute.manager [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 663.816560] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Acquiring lock "refresh_cache-14557f1a-2410-4201-9b91-49d23f18d47a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.816560] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Acquired lock "refresh_cache-14557f1a-2410-4201-9b91-49d23f18d47a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.816806] env[62585]: DEBUG nova.network.neutron [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 664.051407] env[62585]: INFO nova.compute.manager [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] [instance: c080105d-4a58-4616-b65c-7bac79dd93c1] Took 1.02 seconds to deallocate network for instance. [ 664.183093] env[62585]: DEBUG nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 664.338887] env[62585]: DEBUG nova.network.neutron [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.416074] env[62585]: DEBUG nova.network.neutron [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.706710] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.924438] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Releasing lock "refresh_cache-14557f1a-2410-4201-9b91-49d23f18d47a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.924838] env[62585]: DEBUG nova.compute.manager [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 664.924838] env[62585]: DEBUG nova.compute.manager [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 664.925040] env[62585]: DEBUG nova.network.neutron [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 664.944241] env[62585]: DEBUG nova.network.neutron [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 665.085219] env[62585]: INFO nova.scheduler.client.report [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Deleted allocations for instance c080105d-4a58-4616-b65c-7bac79dd93c1 [ 665.180623] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f063491-dc89-43f5-8ee3-d01a105be9cb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.188797] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d58d9c-d43e-487e-bd5d-bbf8946fef7e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.223196] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8107c1-e457-4ee1-ac9a-5f08fe89ceb4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.230826] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d810d5b2-4a07-4d74-adf2-2bb6c8088980 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.245161] env[62585]: DEBUG nova.compute.provider_tree [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 665.448072] env[62585]: DEBUG nova.network.neutron [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.599537] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b54d72ec-c330-40ea-aa31-7669d2de1fb9 tempest-ServersTestBootFromVolume-1715125284 tempest-ServersTestBootFromVolume-1715125284-project-member] Lock "c080105d-4a58-4616-b65c-7bac79dd93c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 118.189s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.749028] env[62585]: DEBUG nova.scheduler.client.report [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 665.952909] env[62585]: INFO nova.compute.manager [None 
req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] [instance: 14557f1a-2410-4201-9b91-49d23f18d47a] Took 1.02 seconds to deallocate network for instance. [ 666.102328] env[62585]: DEBUG nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 666.255545] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.256036] env[62585]: DEBUG nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 666.258750] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.452s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.260168] env[62585]: INFO nova.compute.claims [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 666.628052] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.764500] env[62585]: DEBUG nova.compute.utils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 666.768090] env[62585]: DEBUG nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 666.768339] env[62585]: DEBUG nova.network.neutron [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 666.868918] env[62585]: DEBUG nova.policy [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5db548f8d8ee4db383f03d417c896a65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b11f7597d0aa4cb28fed83803589041b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 666.984798] env[62585]: INFO nova.scheduler.client.report [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Deleted allocations for instance 14557f1a-2410-4201-9b91-49d23f18d47a [ 667.270851] env[62585]: DEBUG nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 667.390923] env[62585]: DEBUG nova.network.neutron [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Successfully created port: 531674da-2207-4a18-93cd-5279aed15d9c {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 667.495197] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8f209add-e37b-492b-bb3b-d82a6cc3aa1d tempest-ServersTestFqdnHostnames-606995664 tempest-ServersTestFqdnHostnames-606995664-project-member] Lock "14557f1a-2410-4201-9b91-49d23f18d47a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 120.036s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.624667] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2cfbb2-15e7-46db-9c40-2e057e00b0fb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.633677] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed07880-9699-4c63-b902-28ee5ecf824b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.669560] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27a4949-2569-44d1-a550-e067224fc590 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.677716] env[62585]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe3f3f3-7c25-477f-a3af-6e751b0fe8fe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.693249] env[62585]: DEBUG nova.compute.provider_tree [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.998413] env[62585]: DEBUG nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 668.197712] env[62585]: DEBUG nova.scheduler.client.report [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 668.283058] env[62585]: DEBUG nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 668.318147] env[62585]: DEBUG nova.virt.hardware [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T10:00:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='d37a896f-d647-418a-b1cb-72de34134e15',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-706566477',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 668.318668] env[62585]: DEBUG nova.virt.hardware [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 668.318842] env[62585]: DEBUG nova.virt.hardware [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 668.319139] env[62585]: DEBUG nova.virt.hardware [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 668.319209] env[62585]: DEBUG nova.virt.hardware [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 668.319538] env[62585]: DEBUG nova.virt.hardware [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 668.319538] env[62585]: DEBUG nova.virt.hardware [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 668.320236] env[62585]: DEBUG nova.virt.hardware [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 668.320236] env[62585]: DEBUG nova.virt.hardware [None 
req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 668.320236] env[62585]: DEBUG nova.virt.hardware [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 668.320236] env[62585]: DEBUG nova.virt.hardware [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 668.321220] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3132e0b2-b5d9-4ec1-b9d4-a4b5c11b14f9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.329290] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd57a18-9b64-4d07-8192-afd7370d9b41 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.521303] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.705086] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.444s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.705086] env[62585]: DEBUG nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 668.706369] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.039s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.752391] env[62585]: DEBUG nova.compute.manager [req-5ff548c0-e057-49af-94b1-e857a3a13af3 req-f8e935bc-0f48-41bf-8543-3356269383d1 service nova] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Received event network-changed-531674da-2207-4a18-93cd-5279aed15d9c {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 668.752391] env[62585]: DEBUG nova.compute.manager [req-5ff548c0-e057-49af-94b1-e857a3a13af3 req-f8e935bc-0f48-41bf-8543-3356269383d1 service nova] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Refreshing instance network info cache due to event network-changed-531674da-2207-4a18-93cd-5279aed15d9c. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 668.752391] env[62585]: DEBUG oslo_concurrency.lockutils [req-5ff548c0-e057-49af-94b1-e857a3a13af3 req-f8e935bc-0f48-41bf-8543-3356269383d1 service nova] Acquiring lock "refresh_cache-3b50dbde-2969-4a4b-ae35-42416342a60b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.752391] env[62585]: DEBUG oslo_concurrency.lockutils [req-5ff548c0-e057-49af-94b1-e857a3a13af3 req-f8e935bc-0f48-41bf-8543-3356269383d1 service nova] Acquired lock "refresh_cache-3b50dbde-2969-4a4b-ae35-42416342a60b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.752391] env[62585]: DEBUG nova.network.neutron [req-5ff548c0-e057-49af-94b1-e857a3a13af3 req-f8e935bc-0f48-41bf-8543-3356269383d1 service nova] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Refreshing network info cache for port 531674da-2207-4a18-93cd-5279aed15d9c {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 668.972217] env[62585]: ERROR nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 531674da-2207-4a18-93cd-5279aed15d9c, please check neutron logs for more information. 
[ 668.972217] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 668.972217] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 668.972217] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 668.972217] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 668.972217] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 668.972217] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 668.972217] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 668.972217] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 668.972217] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 668.972217] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 668.972217] env[62585]: ERROR nova.compute.manager raise self.value [ 668.972217] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 668.972217] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 668.972217] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 668.972217] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 668.972670] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 668.972670] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 668.972670] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 531674da-2207-4a18-93cd-5279aed15d9c, please check neutron logs for more information. 
[ 668.972670] env[62585]: ERROR nova.compute.manager [ 668.972670] env[62585]: Traceback (most recent call last): [ 668.972670] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 668.972670] env[62585]: listener.cb(fileno) [ 668.972670] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 668.972670] env[62585]: result = function(*args, **kwargs) [ 668.972670] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 668.972670] env[62585]: return func(*args, **kwargs) [ 668.972670] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 668.972670] env[62585]: raise e [ 668.972670] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 668.972670] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 668.972670] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 668.972670] env[62585]: created_port_ids = self._update_ports_for_instance( [ 668.972670] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 668.972670] env[62585]: with excutils.save_and_reraise_exception(): [ 668.972670] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 668.972670] env[62585]: self.force_reraise() [ 668.972670] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 668.972670] env[62585]: raise self.value [ 668.972670] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 668.972670] env[62585]: updated_port = self._update_port( [ 668.972670] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 668.972670] env[62585]: _ensure_no_port_binding_failure(port) [ 668.972670] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 668.972670] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 668.973407] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 531674da-2207-4a18-93cd-5279aed15d9c, please check neutron logs for more information. [ 668.973407] env[62585]: Removing descriptor: 15 [ 668.973407] env[62585]: ERROR nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 531674da-2207-4a18-93cd-5279aed15d9c, please check neutron logs for more information. 
[ 668.973407] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Traceback (most recent call last): [ 668.973407] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 668.973407] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] yield resources [ 668.973407] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 668.973407] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] self.driver.spawn(context, instance, image_meta, [ 668.973407] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 668.973407] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 668.973407] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 668.973407] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] vm_ref = self.build_virtual_machine(instance, [ 668.973706] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 668.973706] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] vif_infos = vmwarevif.get_vif_info(self._session, [ 668.973706] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 668.973706] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] for vif in network_info: [ 668.973706] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 668.973706] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] return self._sync_wrapper(fn, *args, **kwargs) [ 668.973706] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 668.973706] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] self.wait() [ 668.973706] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 668.973706] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] self[:] = self._gt.wait() [ 668.973706] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 668.973706] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] return self._exit_event.wait() [ 668.973706] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 668.974028] env[62585]: ERROR 
nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] result = hub.switch() [ 668.974028] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 668.974028] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] return self.greenlet.switch() [ 668.974028] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 668.974028] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] result = function(*args, **kwargs) [ 668.974028] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 668.974028] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] return func(*args, **kwargs) [ 668.974028] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 668.974028] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] raise e [ 668.974028] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 668.974028] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] nwinfo = self.network_api.allocate_for_instance( [ 668.974028] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 668.974028] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] created_port_ids = self._update_ports_for_instance( [ 668.974437] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 668.974437] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] with excutils.save_and_reraise_exception(): [ 668.974437] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 668.974437] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] self.force_reraise() [ 668.974437] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 668.974437] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] raise self.value [ 668.974437] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 668.974437] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] updated_port = self._update_port( [ 668.974437] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 668.974437] 
env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] _ensure_no_port_binding_failure(port) [ 668.974437] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 668.974437] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] raise exception.PortBindingFailed(port_id=port['id']) [ 668.974747] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] nova.exception.PortBindingFailed: Binding failed for port 531674da-2207-4a18-93cd-5279aed15d9c, please check neutron logs for more information. [ 668.974747] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] [ 668.974747] env[62585]: INFO nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Terminating instance [ 668.977014] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Acquiring lock "refresh_cache-3b50dbde-2969-4a4b-ae35-42416342a60b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 669.215789] env[62585]: DEBUG nova.compute.utils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 669.218013] env[62585]: DEBUG nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 669.218206] env[62585]: DEBUG nova.network.neutron [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 669.267816] env[62585]: DEBUG nova.policy [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc899587bee748019cb97be3061bbbf7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aeb8739d28674ba398d3d2ee2ddcf81f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 669.284794] env[62585]: DEBUG nova.network.neutron [req-5ff548c0-e057-49af-94b1-e857a3a13af3 req-f8e935bc-0f48-41bf-8543-3356269383d1 service nova] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 669.405228] env[62585]: DEBUG nova.network.neutron [req-5ff548c0-e057-49af-94b1-e857a3a13af3 req-f8e935bc-0f48-41bf-8543-3356269383d1 service nova] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.620941] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f877593-a96d-46b2-a00d-6e69dc9a25d0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.629518] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978822de-39cd-45d4-959c-8c90e2eba181 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.667755] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90781b3b-d825-4445-999c-6bc959a17515 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.674673] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0774e1-77ee-4713-909c-d37a25a2eefd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.694793] env[62585]: DEBUG nova.compute.provider_tree [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.721468] env[62585]: DEBUG nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 669.798408] env[62585]: DEBUG nova.network.neutron [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Successfully created port: 02f0d99b-ed9d-4b15-82e5-aa932eb0287d {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 669.911027] env[62585]: DEBUG oslo_concurrency.lockutils [req-5ff548c0-e057-49af-94b1-e857a3a13af3 req-f8e935bc-0f48-41bf-8543-3356269383d1 service nova] Releasing lock "refresh_cache-3b50dbde-2969-4a4b-ae35-42416342a60b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.911027] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Acquired lock "refresh_cache-3b50dbde-2969-4a4b-ae35-42416342a60b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.911353] env[62585]: DEBUG nova.network.neutron [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 670.198381] env[62585]: DEBUG nova.scheduler.client.report [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 670.450385] env[62585]: DEBUG nova.network.neutron [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 670.586669] env[62585]: DEBUG nova.network.neutron [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.706102] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.999s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.706374] env[62585]: ERROR nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c5717a5c-e85f-489e-a493-1d22bc3d80a0, please check neutron logs for more information. [ 670.706374] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Traceback (most recent call last): [ 670.706374] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 670.706374] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] self.driver.spawn(context, instance, image_meta, [ 670.706374] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 670.706374] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] self._vmops.spawn(context, instance, image_meta, injected_files, [ 670.706374] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 670.706374] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] vm_ref = self.build_virtual_machine(instance, [ 670.706374] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 670.706374] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] vif_infos = vmwarevif.get_vif_info(self._session, [ 670.706374] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 670.706710] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] for vif in network_info: [ 670.706710] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 670.706710] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] return self._sync_wrapper(fn, *args, **kwargs) [ 670.706710] env[62585]: ERROR nova.compute.manager [instance: 
3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 670.706710] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] self.wait() [ 670.706710] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 670.706710] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] self[:] = self._gt.wait() [ 670.706710] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 670.706710] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] return self._exit_event.wait() [ 670.706710] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 670.706710] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] current.throw(*self._exc) [ 670.706710] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 670.706710] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] result = function(*args, **kwargs) [ 670.707062] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 670.707062] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] return func(*args, **kwargs) [ 670.707062] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 670.707062] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] raise e [ 670.707062] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 670.707062] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] nwinfo = self.network_api.allocate_for_instance( [ 670.707062] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 670.707062] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] created_port_ids = self._update_ports_for_instance( [ 670.707062] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 670.707062] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] with excutils.save_and_reraise_exception(): [ 670.707062] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 670.707062] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] self.force_reraise() [ 670.707062] env[62585]: ERROR nova.compute.manager [instance: 
3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 670.707421] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] raise self.value [ 670.707421] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 670.707421] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] updated_port = self._update_port( [ 670.707421] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 670.707421] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] _ensure_no_port_binding_failure(port) [ 670.707421] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 670.707421] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] raise exception.PortBindingFailed(port_id=port['id']) [ 670.707421] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] nova.exception.PortBindingFailed: Binding failed for port c5717a5c-e85f-489e-a493-1d22bc3d80a0, please check neutron logs for more information. [ 670.707421] env[62585]: ERROR nova.compute.manager [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] [ 670.707421] env[62585]: DEBUG nova.compute.utils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Binding failed for port c5717a5c-e85f-489e-a493-1d22bc3d80a0, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 670.708396] env[62585]: DEBUG oslo_concurrency.lockutils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.277s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.711441] env[62585]: DEBUG nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Build of instance 3695a09f-dffc-4537-ac78-faffd6bdd252 was re-scheduled: Binding failed for port c5717a5c-e85f-489e-a493-1d22bc3d80a0, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 670.712393] env[62585]: DEBUG nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 670.712393] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Acquiring lock "refresh_cache-3695a09f-dffc-4537-ac78-faffd6bdd252" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.712393] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Acquired lock "refresh_cache-3695a09f-dffc-4537-ac78-faffd6bdd252" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.712393] env[62585]: DEBUG nova.network.neutron [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 670.736088] env[62585]: DEBUG nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 670.773175] env[62585]: DEBUG nova.virt.hardware [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 670.773478] env[62585]: DEBUG nova.virt.hardware [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 670.773602] env[62585]: DEBUG nova.virt.hardware [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 670.776013] env[62585]: DEBUG nova.virt.hardware [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 670.776013] env[62585]: DEBUG nova.virt.hardware [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 670.776013] env[62585]: DEBUG nova.virt.hardware [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 670.776013] env[62585]: DEBUG nova.virt.hardware [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 670.776013] env[62585]: DEBUG nova.virt.hardware [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 670.776230] env[62585]: DEBUG 
nova.virt.hardware [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 670.776230] env[62585]: DEBUG nova.virt.hardware [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 670.776230] env[62585]: DEBUG nova.virt.hardware [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 670.776352] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0cdb3f4-952b-46a8-bc8e-2a41283a941d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.787636] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3897c2-d566-40b5-8f4d-e7a7f200c263 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.792955] env[62585]: DEBUG nova.compute.manager [req-409c6e0e-5dfe-4cad-bf0e-355cb9745c88 req-d1bc4529-2a1c-4be8-9fa0-5dc6151b239c service nova] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Received event network-vif-deleted-531674da-2207-4a18-93cd-5279aed15d9c {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 670.797696] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Acquiring lock "b1587330-1740-4bfd-a0c3-a25794c3ccd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.797931] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Lock "b1587330-1740-4bfd-a0c3-a25794c3ccd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.091416] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Releasing lock "refresh_cache-3b50dbde-2969-4a4b-ae35-42416342a60b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.091416] env[62585]: DEBUG nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 671.091416] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 671.091416] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11d12274-8d8a-4ffd-b074-a78741570fb2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.099854] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9d3a34-f592-4356-8720-c988cb689e34 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.125689] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3b50dbde-2969-4a4b-ae35-42416342a60b could not be found. [ 671.126057] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 671.128733] env[62585]: INFO nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 671.128733] env[62585]: DEBUG oslo.service.loopingcall [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 671.128733] env[62585]: DEBUG nova.compute.manager [-] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 671.128733] env[62585]: DEBUG nova.network.neutron [-] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 671.143214] env[62585]: DEBUG nova.network.neutron [-] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.246450] env[62585]: DEBUG nova.network.neutron [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.355525] env[62585]: DEBUG nova.network.neutron [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.595363] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4601368c-9321-4076-8fb2-8a5745fba1c5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.604848] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9fefd94-024f-474a-a94e-8da3fd066b9f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.641316] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77315e65-975d-4fec-9c01-efd8829c8d7a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.644387] env[62585]: DEBUG nova.network.neutron [-] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.649158] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd16518-750e-43c6-a3a2-59e8db601c6c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.665146] env[62585]: DEBUG nova.compute.provider_tree [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.775622] env[62585]: ERROR nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 02f0d99b-ed9d-4b15-82e5-aa932eb0287d, please check neutron logs for more information. 
[ 671.775622] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 671.775622] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 671.775622] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 671.775622] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 671.775622] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 671.775622] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 671.775622] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 671.775622] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 671.775622] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 671.775622] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 671.775622] env[62585]: ERROR nova.compute.manager raise self.value [ 671.775622] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 671.775622] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 671.775622] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 671.775622] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 671.776060] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 671.776060] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 671.776060] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 02f0d99b-ed9d-4b15-82e5-aa932eb0287d, please check neutron logs for more information. 
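The traceback above bottoms out in _ensure_no_port_binding_failure raising PortBindingFailed. A minimal sketch of that check, assuming a Neutron-style port dict in which a failed binding is reported as binding:vif_type = 'binding_failed'; the exception class and the sample port below are simplified stand-ins rather than the actual Nova source:

    # Simplified stand-in for the check named in the traceback above.
    # Assumption: Neutron reports a failed binding by setting the port's
    # 'binding:vif_type' attribute to the sentinel value 'binding_failed'.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example with a port whose binding failed on the Neutron side:
    try:
        _ensure_no_port_binding_failure({
            'id': '02f0d99b-ed9d-4b15-82e5-aa932eb0287d',
            'binding:vif_type': 'binding_failed',
        })
    except PortBindingFailed as exc:
        print(exc)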
[ 671.776060] env[62585]: ERROR nova.compute.manager [ 671.776060] env[62585]: Traceback (most recent call last): [ 671.776060] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 671.776060] env[62585]: listener.cb(fileno) [ 671.776060] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 671.776060] env[62585]: result = function(*args, **kwargs) [ 671.776060] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 671.776060] env[62585]: return func(*args, **kwargs) [ 671.776060] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 671.776060] env[62585]: raise e [ 671.776060] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 671.776060] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 671.776060] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 671.776060] env[62585]: created_port_ids = self._update_ports_for_instance( [ 671.776060] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 671.776060] env[62585]: with excutils.save_and_reraise_exception(): [ 671.776060] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 671.776060] env[62585]: self.force_reraise() [ 671.776060] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 671.776060] env[62585]: raise self.value [ 671.776060] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 671.776060] env[62585]: updated_port = self._update_port( [ 671.776060] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 671.776060] env[62585]: _ensure_no_port_binding_failure(port) [ 671.776060] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 671.776060] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 671.776969] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 02f0d99b-ed9d-4b15-82e5-aa932eb0287d, please check neutron logs for more information. [ 671.776969] env[62585]: Removing descriptor: 15 [ 671.776969] env[62585]: ERROR nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 02f0d99b-ed9d-4b15-82e5-aa932eb0287d, please check neutron logs for more information. 
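Both dumps of this failure pass through excutils.save_and_reraise_exception(), whose force_reraise() / raise self.value frames appear in the frames above. A simplified stand-in for that context manager, assuming the usual pattern of entering it from inside an except block so cleanup can run before the original exception is re-raised; this is illustrative, not the oslo_utils implementation:

    import sys

    class save_and_reraise_exception:
        # Remember the exception being handled when the block is entered, let
        # the with-body run its cleanup, then re-raise the saved exception on
        # exit (the "raise self.value" frame in the traceback above).
        def __init__(self, reraise=True):
            self.reraise = reraise
            self.value = None

        def __enter__(self):
            self.value = sys.exc_info()[1]
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            if exc_type is not None:
                return False      # the cleanup body itself raised; let that win
            if self.reraise and self.value is not None:
                raise self.value  # surface the original PortBindingFailed
            return False

    # Usage mirroring _update_ports_for_instance: roll back, then propagate.
    def update_ports(ports, update_one, rollback):
        for port in ports:
            try:
                update_one(port)
            except Exception:
                with save_and_reraise_exception():
                    rollback(port)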
[ 671.776969] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Traceback (most recent call last): [ 671.776969] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 671.776969] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] yield resources [ 671.776969] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 671.776969] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] self.driver.spawn(context, instance, image_meta, [ 671.776969] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 671.776969] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] self._vmops.spawn(context, instance, image_meta, injected_files, [ 671.776969] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 671.776969] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] vm_ref = self.build_virtual_machine(instance, [ 671.777356] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 671.777356] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] vif_infos = vmwarevif.get_vif_info(self._session, [ 671.777356] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 671.777356] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] for vif in network_info: [ 671.777356] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 671.777356] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] return self._sync_wrapper(fn, *args, **kwargs) [ 671.777356] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 671.777356] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] self.wait() [ 671.777356] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 671.777356] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] self[:] = self._gt.wait() [ 671.777356] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 671.777356] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] return self._exit_event.wait() [ 671.777356] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 671.777687] env[62585]: ERROR 
nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] result = hub.switch() [ 671.777687] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 671.777687] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] return self.greenlet.switch() [ 671.777687] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 671.777687] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] result = function(*args, **kwargs) [ 671.777687] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 671.777687] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] return func(*args, **kwargs) [ 671.777687] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 671.777687] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] raise e [ 671.777687] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 671.777687] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] nwinfo = self.network_api.allocate_for_instance( [ 671.777687] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 671.777687] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] created_port_ids = self._update_ports_for_instance( [ 671.778027] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 671.778027] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] with excutils.save_and_reraise_exception(): [ 671.778027] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 671.778027] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] self.force_reraise() [ 671.778027] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 671.778027] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] raise self.value [ 671.778027] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 671.778027] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] updated_port = self._update_port( [ 671.778027] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 671.778027] 
env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] _ensure_no_port_binding_failure(port) [ 671.778027] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 671.778027] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] raise exception.PortBindingFailed(port_id=port['id']) [ 671.778331] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] nova.exception.PortBindingFailed: Binding failed for port 02f0d99b-ed9d-4b15-82e5-aa932eb0287d, please check neutron logs for more information. [ 671.778331] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] [ 671.778331] env[62585]: INFO nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Terminating instance [ 671.779725] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Acquiring lock "refresh_cache-1cad8d1b-ed02-424c-879c-2f23d4d90b22" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.779878] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Acquired lock "refresh_cache-1cad8d1b-ed02-424c-879c-2f23d4d90b22" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.780054] env[62585]: DEBUG nova.network.neutron [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 671.858564] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Releasing lock "refresh_cache-3695a09f-dffc-4537-ac78-faffd6bdd252" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.858974] env[62585]: DEBUG nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 671.859225] env[62585]: DEBUG nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 671.859934] env[62585]: DEBUG nova.network.neutron [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 671.887524] env[62585]: DEBUG nova.network.neutron [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 672.148706] env[62585]: INFO nova.compute.manager [-] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Took 1.02 seconds to deallocate network for instance. [ 672.151980] env[62585]: DEBUG nova.compute.claims [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 672.152223] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 672.168082] env[62585]: DEBUG nova.scheduler.client.report [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 672.303388] env[62585]: DEBUG nova.network.neutron [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 672.392066] env[62585]: DEBUG nova.network.neutron [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.437936] env[62585]: DEBUG nova.network.neutron [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.679453] env[62585]: DEBUG oslo_concurrency.lockutils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.971s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.680092] env[62585]: ERROR nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 34e0888f-ed4b-4330-9155-39789e4652d1, please check neutron logs for more information. [ 672.680092] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Traceback (most recent call last): [ 672.680092] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 672.680092] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] self.driver.spawn(context, instance, image_meta, [ 672.680092] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 672.680092] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] self._vmops.spawn(context, instance, image_meta, injected_files, [ 672.680092] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 672.680092] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] vm_ref = self.build_virtual_machine(instance, [ 672.680092] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 672.680092] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] vif_infos = vmwarevif.get_vif_info(self._session, [ 672.680092] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 672.680456] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] for vif in network_info: [ 
672.680456] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 672.680456] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] return self._sync_wrapper(fn, *args, **kwargs) [ 672.680456] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 672.680456] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] self.wait() [ 672.680456] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 672.680456] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] self[:] = self._gt.wait() [ 672.680456] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 672.680456] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] return self._exit_event.wait() [ 672.680456] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 672.680456] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] current.throw(*self._exc) [ 672.680456] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 672.680456] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] result = function(*args, **kwargs) [ 672.680849] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 672.680849] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] return func(*args, **kwargs) [ 672.680849] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 672.680849] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] raise e [ 672.680849] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.680849] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] nwinfo = self.network_api.allocate_for_instance( [ 672.680849] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 672.680849] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] created_port_ids = self._update_ports_for_instance( [ 672.680849] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 672.680849] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] with excutils.save_and_reraise_exception(): [ 672.680849] env[62585]: ERROR 
nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.680849] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] self.force_reraise() [ 672.680849] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.681293] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] raise self.value [ 672.681293] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 672.681293] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] updated_port = self._update_port( [ 672.681293] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.681293] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] _ensure_no_port_binding_failure(port) [ 672.681293] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.681293] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] raise exception.PortBindingFailed(port_id=port['id']) [ 672.681293] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] nova.exception.PortBindingFailed: Binding failed for port 34e0888f-ed4b-4330-9155-39789e4652d1, please check neutron logs for more information. [ 672.681293] env[62585]: ERROR nova.compute.manager [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] [ 672.681293] env[62585]: DEBUG nova.compute.utils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Binding failed for port 34e0888f-ed4b-4330-9155-39789e4652d1, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 672.683290] env[62585]: DEBUG nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Build of instance 86e3d197-2e8c-4357-ac0a-e1af8e247024 was re-scheduled: Binding failed for port 34e0888f-ed4b-4330-9155-39789e4652d1, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 672.683702] env[62585]: DEBUG nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 672.683931] env[62585]: DEBUG oslo_concurrency.lockutils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Acquiring lock "refresh_cache-86e3d197-2e8c-4357-ac0a-e1af8e247024" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.684094] env[62585]: DEBUG oslo_concurrency.lockutils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Acquired lock "refresh_cache-86e3d197-2e8c-4357-ac0a-e1af8e247024" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.684253] env[62585]: DEBUG nova.network.neutron [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 672.685296] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.538s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.687204] env[62585]: INFO nova.compute.claims [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 672.870566] env[62585]: DEBUG nova.compute.manager [req-a9a76984-8e00-4074-85f6-71a54a14f62d req-ff2e4b64-df32-4420-b07f-907ed71dc3f0 service nova] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Received event network-changed-02f0d99b-ed9d-4b15-82e5-aa932eb0287d {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 672.870913] env[62585]: DEBUG nova.compute.manager [req-a9a76984-8e00-4074-85f6-71a54a14f62d req-ff2e4b64-df32-4420-b07f-907ed71dc3f0 service nova] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Refreshing instance network info cache due to event network-changed-02f0d99b-ed9d-4b15-82e5-aa932eb0287d. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 672.871231] env[62585]: DEBUG oslo_concurrency.lockutils [req-a9a76984-8e00-4074-85f6-71a54a14f62d req-ff2e4b64-df32-4420-b07f-907ed71dc3f0 service nova] Acquiring lock "refresh_cache-1cad8d1b-ed02-424c-879c-2f23d4d90b22" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.896216] env[62585]: INFO nova.compute.manager [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] [instance: 3695a09f-dffc-4537-ac78-faffd6bdd252] Took 1.03 seconds to deallocate network for instance. [ 672.944019] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Releasing lock "refresh_cache-1cad8d1b-ed02-424c-879c-2f23d4d90b22" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 672.944019] env[62585]: DEBUG nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 672.944019] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 672.944019] env[62585]: DEBUG oslo_concurrency.lockutils [req-a9a76984-8e00-4074-85f6-71a54a14f62d req-ff2e4b64-df32-4420-b07f-907ed71dc3f0 service nova] Acquired lock "refresh_cache-1cad8d1b-ed02-424c-879c-2f23d4d90b22" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.944019] env[62585]: DEBUG nova.network.neutron [req-a9a76984-8e00-4074-85f6-71a54a14f62d req-ff2e4b64-df32-4420-b07f-907ed71dc3f0 service nova] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Refreshing network info cache for port 02f0d99b-ed9d-4b15-82e5-aa932eb0287d {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 672.944300] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-785ae5e1-bd7e-4663-a22a-2e1347f2b0e6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.953945] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07869ff1-275d-41db-b4b1-7c804747519f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.982391] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1cad8d1b-ed02-424c-879c-2f23d4d90b22 could not be found. 
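After the WARNING above, the log continues with "Instance destroyed", network deallocation, and an aborted resource claim: a VM that is already gone from the hypervisor is treated as a completed destroy and the failed build is cleaned up anyway. A rough sketch of that order of operations; driver, network_api and resource_tracker are placeholders, not the real Nova objects or signatures:

    class InstanceNotFound(Exception):
        pass

    def destroy_and_cleanup(driver, network_api, resource_tracker, context, instance):
        try:
            driver.destroy(context, instance)        # "Destroying instance"
        except InstanceNotFound:
            # "Instance does not exist on backend": nothing left to tear down
            # on the hypervisor, so carry on with the rest of the cleanup.
            pass
        # Deallocate the Neutron ports, then abort the resource claim so the
        # VCPU / MEMORY_MB / DISK_GB usage is handed back to the tracker.
        network_api.deallocate_for_instance(context, instance)
        resource_tracker.abort_instance_claim(context, instance)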
[ 672.982610] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 672.982779] env[62585]: INFO nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Took 0.04 seconds to destroy the instance on the hypervisor. [ 672.983028] env[62585]: DEBUG oslo.service.loopingcall [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 672.983488] env[62585]: DEBUG nova.compute.manager [-] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 672.983631] env[62585]: DEBUG nova.network.neutron [-] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 673.002885] env[62585]: DEBUG nova.network.neutron [-] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.218781] env[62585]: DEBUG nova.network.neutron [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.322939] env[62585]: DEBUG nova.network.neutron [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.463209] env[62585]: DEBUG nova.network.neutron [req-a9a76984-8e00-4074-85f6-71a54a14f62d req-ff2e4b64-df32-4420-b07f-907ed71dc3f0 service nova] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.505975] env[62585]: DEBUG nova.network.neutron [-] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.551548] env[62585]: DEBUG nova.network.neutron [req-a9a76984-8e00-4074-85f6-71a54a14f62d req-ff2e4b64-df32-4420-b07f-907ed71dc3f0 service nova] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.827964] env[62585]: DEBUG oslo_concurrency.lockutils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Releasing lock "refresh_cache-86e3d197-2e8c-4357-ac0a-e1af8e247024" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.828256] env[62585]: DEBUG nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 673.828448] env[62585]: DEBUG nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 673.828516] env[62585]: DEBUG nova.network.neutron [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 673.860529] env[62585]: DEBUG nova.network.neutron [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.926372] env[62585]: INFO nova.scheduler.client.report [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Deleted allocations for instance 3695a09f-dffc-4537-ac78-faffd6bdd252 [ 674.010733] env[62585]: INFO nova.compute.manager [-] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Took 1.03 seconds to deallocate network for instance. 
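The refresh_cache-<uuid> acquire/release pairs throughout this section come from oslo.concurrency: the per-instance network-info cache is rebuilt only while holding a lock named after the instance, and the "waited"/"held" timings in these lines are reported by the same module. A minimal sketch using lockutils.lock, with rebuild_network_info as a hypothetical callback:

    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid, rebuild_network_info):
        # Matches the "Acquiring lock" / "Acquired lock" / "Releasing lock"
        # refresh_cache-<uuid> records above; the lock is process-local by
        # default, serializing cache refreshes for one instance at a time.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return rebuild_network_info(instance_uuid)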
[ 674.013996] env[62585]: DEBUG nova.compute.claims [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 674.013996] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.054611] env[62585]: DEBUG oslo_concurrency.lockutils [req-a9a76984-8e00-4074-85f6-71a54a14f62d req-ff2e4b64-df32-4420-b07f-907ed71dc3f0 service nova] Releasing lock "refresh_cache-1cad8d1b-ed02-424c-879c-2f23d4d90b22" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.054611] env[62585]: DEBUG nova.compute.manager [req-a9a76984-8e00-4074-85f6-71a54a14f62d req-ff2e4b64-df32-4420-b07f-907ed71dc3f0 service nova] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Received event network-vif-deleted-02f0d99b-ed9d-4b15-82e5-aa932eb0287d {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 674.079311] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d83be74-321f-4cf2-96f9-9ef95ac15897 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.090107] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e689565-7632-4ee5-9977-1263300310a5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.123351] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600d1d07-ddbf-423e-8514-de8b29e8b160 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.131179] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a97510d-3402-4bef-94fd-298f22b1b6a4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.144851] env[62585]: DEBUG nova.compute.provider_tree [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.363976] env[62585]: DEBUG nova.network.neutron [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.441824] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f297a9e-d8c6-486f-8a15-3ee43da2b9d5 tempest-ServersV294TestFqdnHostnames-1935176655 tempest-ServersV294TestFqdnHostnames-1935176655-project-member] Lock 
"3695a09f-dffc-4537-ac78-faffd6bdd252" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.435s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.653078] env[62585]: DEBUG nova.scheduler.client.report [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 674.870312] env[62585]: INFO nova.compute.manager [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] [instance: 86e3d197-2e8c-4357-ac0a-e1af8e247024] Took 1.04 seconds to deallocate network for instance. [ 674.944379] env[62585]: DEBUG nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 675.127964] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "ad45de09-f60c-4ac5-a4ff-7088d9742d6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.128245] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "ad45de09-f60c-4ac5-a4ff-7088d9742d6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.159107] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.160019] env[62585]: DEBUG nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 675.164985] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.313s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.474886] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.645830] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquiring lock "dd57237d-875e-453a-b830-749776ce10b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.645830] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "dd57237d-875e-453a-b830-749776ce10b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.669315] env[62585]: DEBUG nova.compute.utils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 675.673584] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquiring lock "92168077-2b7e-4355-9880-a2f62674fc7e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.674029] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "92168077-2b7e-4355-9880-a2f62674fc7e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.674029] env[62585]: DEBUG nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 675.674793] env[62585]: DEBUG nova.network.neutron [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 675.703551] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquiring lock "3d0c96e4-65b2-46f7-a742-f36cd11ff8ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.703788] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "3d0c96e4-65b2-46f7-a742-f36cd11ff8ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.724996] env[62585]: DEBUG nova.policy [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1160632103914a6ba10101dbd49253e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd37d69631db4432db627c56d1f9aad53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 675.903686] env[62585]: INFO nova.scheduler.client.report [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Deleted allocations for instance 86e3d197-2e8c-4357-ac0a-e1af8e247024 [ 676.016897] env[62585]: DEBUG nova.network.neutron [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Successfully created port: ae42ce76-2f63-4dd4-9a61-925fb66a7a38 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 676.097940] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4070ba93-d68d-4eef-87a7-19e4dc76f5ec {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.105840] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb9b6e2-73f2-414a-a881-d41c1a550312 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.136016] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66dd2e2d-434e-4d04-8601-2490761bb96e {{(pid=62585) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.142266] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e001869a-9a7f-4deb-b8db-46c628ed9938 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.155569] env[62585]: DEBUG nova.compute.provider_tree [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.176711] env[62585]: DEBUG nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 676.417759] env[62585]: DEBUG oslo_concurrency.lockutils [None req-82d3b09f-ef48-4887-a314-86720feedecc tempest-ServerAddressesTestJSON-1512726450 tempest-ServerAddressesTestJSON-1512726450-project-member] Lock "86e3d197-2e8c-4357-ac0a-e1af8e247024" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.390s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.662019] env[62585]: DEBUG nova.scheduler.client.report [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 676.923218] env[62585]: DEBUG nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 677.131076] env[62585]: DEBUG nova.compute.manager [req-62b4df05-0627-497e-8b5e-daa271251a7e req-515de09b-db20-4237-b4ca-dd7cb1137aa8 service nova] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Received event network-changed-ae42ce76-2f63-4dd4-9a61-925fb66a7a38 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 677.131256] env[62585]: DEBUG nova.compute.manager [req-62b4df05-0627-497e-8b5e-daa271251a7e req-515de09b-db20-4237-b4ca-dd7cb1137aa8 service nova] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Refreshing instance network info cache due to event network-changed-ae42ce76-2f63-4dd4-9a61-925fb66a7a38. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 677.131410] env[62585]: DEBUG oslo_concurrency.lockutils [req-62b4df05-0627-497e-8b5e-daa271251a7e req-515de09b-db20-4237-b4ca-dd7cb1137aa8 service nova] Acquiring lock "refresh_cache-ed0ec962-3c4e-409f-9332-0a79ca1c6ed3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.131549] env[62585]: DEBUG oslo_concurrency.lockutils [req-62b4df05-0627-497e-8b5e-daa271251a7e req-515de09b-db20-4237-b4ca-dd7cb1137aa8 service nova] Acquired lock "refresh_cache-ed0ec962-3c4e-409f-9332-0a79ca1c6ed3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.131706] env[62585]: DEBUG nova.network.neutron [req-62b4df05-0627-497e-8b5e-daa271251a7e req-515de09b-db20-4237-b4ca-dd7cb1137aa8 service nova] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Refreshing network info cache for port ae42ce76-2f63-4dd4-9a61-925fb66a7a38 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 677.164498] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.002s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.165427] env[62585]: ERROR nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port eebdbdfc-0983-4d19-a0f7-cda4fb115666, please check neutron logs for more information. 
[ 677.165427] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Traceback (most recent call last): [ 677.165427] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 677.165427] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] self.driver.spawn(context, instance, image_meta, [ 677.165427] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 677.165427] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 677.165427] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 677.165427] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] vm_ref = self.build_virtual_machine(instance, [ 677.165427] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 677.165427] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] vif_infos = vmwarevif.get_vif_info(self._session, [ 677.165427] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 677.165716] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] for vif in network_info: [ 677.165716] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 677.165716] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] return self._sync_wrapper(fn, *args, **kwargs) [ 677.165716] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 677.165716] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] self.wait() [ 677.165716] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 677.165716] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] self[:] = self._gt.wait() [ 677.165716] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 677.165716] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] return self._exit_event.wait() [ 677.165716] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 677.165716] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] current.throw(*self._exc) [ 677.165716] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
677.165716] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] result = function(*args, **kwargs) [ 677.166035] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 677.166035] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] return func(*args, **kwargs) [ 677.166035] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 677.166035] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] raise e [ 677.166035] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 677.166035] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] nwinfo = self.network_api.allocate_for_instance( [ 677.166035] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 677.166035] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] created_port_ids = self._update_ports_for_instance( [ 677.166035] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 677.166035] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] with excutils.save_and_reraise_exception(): [ 677.166035] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.166035] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] self.force_reraise() [ 677.166035] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.166419] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] raise self.value [ 677.166419] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 677.166419] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] updated_port = self._update_port( [ 677.166419] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.166419] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] _ensure_no_port_binding_failure(port) [ 677.166419] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 677.166419] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] raise exception.PortBindingFailed(port_id=port['id']) [ 677.166419] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] nova.exception.PortBindingFailed: Binding failed for 
port eebdbdfc-0983-4d19-a0f7-cda4fb115666, please check neutron logs for more information. [ 677.166419] env[62585]: ERROR nova.compute.manager [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] [ 677.166419] env[62585]: DEBUG nova.compute.utils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Binding failed for port eebdbdfc-0983-4d19-a0f7-cda4fb115666, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 677.169162] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.823s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.171080] env[62585]: INFO nova.compute.claims [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 677.179050] env[62585]: DEBUG nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Build of instance aae9ff25-f304-4dbe-824c-b17b3522655c was re-scheduled: Binding failed for port eebdbdfc-0983-4d19-a0f7-cda4fb115666, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 677.179596] env[62585]: DEBUG nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 677.179875] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Acquiring lock "refresh_cache-aae9ff25-f304-4dbe-824c-b17b3522655c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.180073] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Acquired lock "refresh_cache-aae9ff25-f304-4dbe-824c-b17b3522655c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.180902] env[62585]: DEBUG nova.network.neutron [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 677.187444] env[62585]: DEBUG nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 677.225711] env[62585]: DEBUG nova.virt.hardware [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 677.225988] env[62585]: DEBUG nova.virt.hardware [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 677.226210] env[62585]: DEBUG nova.virt.hardware [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 677.226415] env[62585]: DEBUG nova.virt.hardware [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 677.226891] env[62585]: DEBUG nova.virt.hardware [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 677.226891] env[62585]: DEBUG nova.virt.hardware [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 677.227032] env[62585]: DEBUG nova.virt.hardware [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 677.230308] env[62585]: DEBUG nova.virt.hardware [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 677.230586] env[62585]: DEBUG nova.virt.hardware [None 
req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 677.230841] env[62585]: DEBUG nova.virt.hardware [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 677.231135] env[62585]: DEBUG nova.virt.hardware [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 677.232449] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993288a9-7c33-4ea2-92ef-95b54865ae17 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.242041] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551d5541-e922-447e-a9d2-d2db6ad9b562 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.434181] env[62585]: ERROR nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ae42ce76-2f63-4dd4-9a61-925fb66a7a38, please check neutron logs for more information. 
[ 677.434181] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 677.434181] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 677.434181] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 677.434181] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 677.434181] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 677.434181] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 677.434181] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 677.434181] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.434181] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 677.434181] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.434181] env[62585]: ERROR nova.compute.manager raise self.value [ 677.434181] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 677.434181] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 677.434181] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.434181] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 677.434762] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 677.434762] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 677.434762] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ae42ce76-2f63-4dd4-9a61-925fb66a7a38, please check neutron logs for more information. 
[ 677.434762] env[62585]: ERROR nova.compute.manager [ 677.434762] env[62585]: Traceback (most recent call last): [ 677.434762] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 677.434762] env[62585]: listener.cb(fileno) [ 677.434762] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 677.434762] env[62585]: result = function(*args, **kwargs) [ 677.434762] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 677.434762] env[62585]: return func(*args, **kwargs) [ 677.434762] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 677.434762] env[62585]: raise e [ 677.434762] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 677.434762] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 677.434762] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 677.434762] env[62585]: created_port_ids = self._update_ports_for_instance( [ 677.434762] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 677.434762] env[62585]: with excutils.save_and_reraise_exception(): [ 677.434762] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.434762] env[62585]: self.force_reraise() [ 677.434762] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.434762] env[62585]: raise self.value [ 677.434762] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 677.434762] env[62585]: updated_port = self._update_port( [ 677.434762] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.434762] env[62585]: _ensure_no_port_binding_failure(port) [ 677.434762] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 677.434762] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 677.435480] env[62585]: nova.exception.PortBindingFailed: Binding failed for port ae42ce76-2f63-4dd4-9a61-925fb66a7a38, please check neutron logs for more information. [ 677.435480] env[62585]: Removing descriptor: 15 [ 677.436163] env[62585]: ERROR nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ae42ce76-2f63-4dd4-9a61-925fb66a7a38, please check neutron logs for more information. 
[ 677.436163] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Traceback (most recent call last): [ 677.436163] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 677.436163] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] yield resources [ 677.436163] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 677.436163] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] self.driver.spawn(context, instance, image_meta, [ 677.436163] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 677.436163] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 677.436163] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 677.436163] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] vm_ref = self.build_virtual_machine(instance, [ 677.436163] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 677.438047] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] vif_infos = vmwarevif.get_vif_info(self._session, [ 677.438047] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 677.438047] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] for vif in network_info: [ 677.438047] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 677.438047] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] return self._sync_wrapper(fn, *args, **kwargs) [ 677.438047] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 677.438047] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] self.wait() [ 677.438047] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 677.438047] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] self[:] = self._gt.wait() [ 677.438047] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 677.438047] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] return self._exit_event.wait() [ 677.438047] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 677.438047] env[62585]: ERROR 
nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] result = hub.switch() [ 677.439254] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 677.439254] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] return self.greenlet.switch() [ 677.439254] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 677.439254] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] result = function(*args, **kwargs) [ 677.439254] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 677.439254] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] return func(*args, **kwargs) [ 677.439254] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 677.439254] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] raise e [ 677.439254] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 677.439254] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] nwinfo = self.network_api.allocate_for_instance( [ 677.439254] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 677.439254] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] created_port_ids = self._update_ports_for_instance( [ 677.439254] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 677.439578] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] with excutils.save_and_reraise_exception(): [ 677.439578] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.439578] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] self.force_reraise() [ 677.439578] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.439578] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] raise self.value [ 677.439578] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 677.439578] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] updated_port = self._update_port( [ 677.439578] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.439578] 
env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] _ensure_no_port_binding_failure(port) [ 677.439578] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 677.439578] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] raise exception.PortBindingFailed(port_id=port['id']) [ 677.439578] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] nova.exception.PortBindingFailed: Binding failed for port ae42ce76-2f63-4dd4-9a61-925fb66a7a38, please check neutron logs for more information. [ 677.439578] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] [ 677.439909] env[62585]: INFO nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Terminating instance [ 677.441026] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Acquiring lock "refresh_cache-ed0ec962-3c4e-409f-9332-0a79ca1c6ed3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.450742] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.662086] env[62585]: DEBUG nova.network.neutron [req-62b4df05-0627-497e-8b5e-daa271251a7e req-515de09b-db20-4237-b4ca-dd7cb1137aa8 service nova] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 677.705073] env[62585]: DEBUG nova.network.neutron [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 677.770482] env[62585]: DEBUG nova.network.neutron [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.805537] env[62585]: DEBUG nova.network.neutron [req-62b4df05-0627-497e-8b5e-daa271251a7e req-515de09b-db20-4237-b4ca-dd7cb1137aa8 service nova] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.274858] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Releasing lock "refresh_cache-aae9ff25-f304-4dbe-824c-b17b3522655c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.275223] env[62585]: DEBUG nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 678.275311] env[62585]: DEBUG nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 678.275432] env[62585]: DEBUG nova.network.neutron [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 678.300094] env[62585]: DEBUG nova.network.neutron [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.309979] env[62585]: DEBUG oslo_concurrency.lockutils [req-62b4df05-0627-497e-8b5e-daa271251a7e req-515de09b-db20-4237-b4ca-dd7cb1137aa8 service nova] Releasing lock "refresh_cache-ed0ec962-3c4e-409f-9332-0a79ca1c6ed3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.309979] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Acquired lock "refresh_cache-ed0ec962-3c4e-409f-9332-0a79ca1c6ed3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.309979] env[62585]: DEBUG nova.network.neutron [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 678.587069] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de88bac-c9d9-445d-86e7-fad03978856f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.594767] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d27aed8-8bc6-4504-aa72-a46806c30138 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.629773] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564d5178-70bb-4000-b767-24b181611f58 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.637616] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45d0f50-4d07-499e-8540-d6129ef3a03d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.655771] env[62585]: DEBUG nova.compute.provider_tree [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.803360] env[62585]: DEBUG nova.network.neutron [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.839774] env[62585]: DEBUG nova.network.neutron [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.031684] env[62585]: DEBUG nova.network.neutron [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.157884] env[62585]: DEBUG nova.scheduler.client.report [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 679.258809] env[62585]: DEBUG nova.compute.manager [req-33d297af-31a1-4066-8686-3594f623afe9 req-b93a0d4e-e12a-405b-b84a-6a79f454ca20 service nova] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Received event network-vif-deleted-ae42ce76-2f63-4dd4-9a61-925fb66a7a38 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 679.309833] env[62585]: INFO nova.compute.manager [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] [instance: aae9ff25-f304-4dbe-824c-b17b3522655c] Took 1.03 seconds to deallocate network for instance. [ 679.536139] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Releasing lock "refresh_cache-ed0ec962-3c4e-409f-9332-0a79ca1c6ed3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.536139] env[62585]: DEBUG nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 679.536139] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 679.536139] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39ae9b87-cf1a-49bb-a158-d422efc0a6d6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.552841] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536ad783-f2a2-4ede-83c1-0e5a96451271 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.575816] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ed0ec962-3c4e-409f-9332-0a79ca1c6ed3 could not be found. [ 679.577030] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 679.577030] env[62585]: INFO nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 679.577030] env[62585]: DEBUG oslo.service.loopingcall [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 679.577030] env[62585]: DEBUG nova.compute.manager [-] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 679.577030] env[62585]: DEBUG nova.network.neutron [-] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 679.594539] env[62585]: DEBUG nova.network.neutron [-] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.663301] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.663865] env[62585]: DEBUG nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 679.667189] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.828s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.667375] env[62585]: DEBUG nova.objects.instance [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62585) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 680.096725] env[62585]: DEBUG nova.network.neutron [-] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.173329] env[62585]: DEBUG nova.compute.utils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 680.174726] env[62585]: DEBUG nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 680.174884] env[62585]: DEBUG nova.network.neutron [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 680.258560] env[62585]: DEBUG nova.policy [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f3e88a0084541a996909b07f024a52a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b04f6ed8a6f84442b1bf64911ba06b47', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 680.351922] env[62585]: INFO nova.scheduler.client.report [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Deleted allocations for instance aae9ff25-f304-4dbe-824c-b17b3522655c [ 680.599541] env[62585]: INFO nova.compute.manager [-] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Took 1.02 seconds to deallocate network for instance. [ 680.602842] env[62585]: DEBUG nova.compute.claims [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 680.603051] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.649195] env[62585]: DEBUG nova.network.neutron [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Successfully created port: 3dfed7bc-259f-44c5-a77f-4ce1f09e11cb {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 680.682167] env[62585]: DEBUG nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 680.690432] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d89c931f-5dda-4283-8b87-18cd46753b14 tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.023s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.691660] env[62585]: DEBUG oslo_concurrency.lockutils [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.097s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.694083] env[62585]: DEBUG nova.objects.instance [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lazy-loading 'resources' on Instance uuid 2cf0927d-8d98-4554-92ce-c049e1ea179c {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 680.867366] env[62585]: DEBUG oslo_concurrency.lockutils [None req-02538a76-a426-4d85-a8f0-cba96aa06288 tempest-TenantUsagesTestJSON-223697499 tempest-TenantUsagesTestJSON-223697499-project-member] Lock "aae9ff25-f304-4dbe-824c-b17b3522655c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.694s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.369164] env[62585]: DEBUG nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 681.607433] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8af1e4e-7271-4464-82e9-c6fe0a614f76 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.616253] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533c76f2-f046-4ed8-967a-d8808d2181eb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.654299] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899c6239-5178-4be7-98bc-09bd95a95cdd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.659095] env[62585]: DEBUG nova.compute.manager [req-2ea26218-051b-4aaa-844e-badab2a77cee req-c6c0354b-9390-4fe5-b3b2-9abee2a06e37 service nova] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Received event network-changed-3dfed7bc-259f-44c5-a77f-4ce1f09e11cb {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 681.659277] env[62585]: DEBUG nova.compute.manager [req-2ea26218-051b-4aaa-844e-badab2a77cee req-c6c0354b-9390-4fe5-b3b2-9abee2a06e37 service nova] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Refreshing instance network info cache due to event network-changed-3dfed7bc-259f-44c5-a77f-4ce1f09e11cb. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 681.659419] env[62585]: DEBUG oslo_concurrency.lockutils [req-2ea26218-051b-4aaa-844e-badab2a77cee req-c6c0354b-9390-4fe5-b3b2-9abee2a06e37 service nova] Acquiring lock "refresh_cache-ae66c3e2-eac8-4239-b5be-64dc0dcf2c04" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.659559] env[62585]: DEBUG oslo_concurrency.lockutils [req-2ea26218-051b-4aaa-844e-badab2a77cee req-c6c0354b-9390-4fe5-b3b2-9abee2a06e37 service nova] Acquired lock "refresh_cache-ae66c3e2-eac8-4239-b5be-64dc0dcf2c04" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.659715] env[62585]: DEBUG nova.network.neutron [req-2ea26218-051b-4aaa-844e-badab2a77cee req-c6c0354b-9390-4fe5-b3b2-9abee2a06e37 service nova] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Refreshing network info cache for port 3dfed7bc-259f-44c5-a77f-4ce1f09e11cb {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 681.666843] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13adb567-356e-470c-891c-51459650f163 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.681126] env[62585]: DEBUG nova.compute.provider_tree [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.695174] env[62585]: DEBUG nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] 
Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 681.724317] env[62585]: DEBUG nova.virt.hardware [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 681.724604] env[62585]: DEBUG nova.virt.hardware [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 681.724763] env[62585]: DEBUG nova.virt.hardware [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 681.724943] env[62585]: DEBUG nova.virt.hardware [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 681.729502] env[62585]: DEBUG nova.virt.hardware [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 681.729502] env[62585]: DEBUG nova.virt.hardware [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 681.729502] env[62585]: DEBUG nova.virt.hardware [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 681.729502] env[62585]: DEBUG nova.virt.hardware [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 681.729502] env[62585]: DEBUG 
nova.virt.hardware [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 681.729652] env[62585]: DEBUG nova.virt.hardware [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 681.729652] env[62585]: DEBUG nova.virt.hardware [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 681.729652] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adbd0dee-c07c-4215-9c4b-81bd907d5b15 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.737348] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7eb6452-d781-4cc3-8dde-b8c3b9b5ae9e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.900484] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.949993] env[62585]: ERROR nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3dfed7bc-259f-44c5-a77f-4ce1f09e11cb, please check neutron logs for more information. 
[ 681.949993] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 681.949993] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 681.949993] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 681.949993] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 681.949993] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 681.949993] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 681.949993] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 681.949993] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.949993] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 681.949993] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.949993] env[62585]: ERROR nova.compute.manager raise self.value [ 681.949993] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 681.949993] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 681.949993] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.949993] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 681.950441] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 681.950441] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 681.950441] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3dfed7bc-259f-44c5-a77f-4ce1f09e11cb, please check neutron logs for more information. 
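The traceback above records the usual shape of this failure: Neutron accepts the port create for 3dfed7bc-259f-44c5-a77f-4ce1f09e11cb, but the subsequent port update comes back unbindable, `_ensure_no_port_binding_failure` raises `PortBindingFailed`, and `excutils.save_and_reraise_exception()` (entered from the except handler in `_update_ports_for_instance`) re-raises the original exception after cleanup, which is why `force_reraise` appears in the frames. A minimal runnable sketch of that pattern follows; the exception class, `_update_port`, and the cleanup helper are illustrative stand-ins, and only `save_and_reraise_exception` is the real oslo.utils API:

```python
# Sketch of the failure path in the traceback above. PortBindingFailed,
# _update_port and _cleanup_created_ports are stand-ins for illustration;
# only excutils.save_and_reraise_exception is the real oslo.utils helper.
from oslo_utils import excutils


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)


def _ensure_no_port_binding_failure(port):
    # Neutron flags an unbindable port via binding:vif_type.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


def _update_port(port):
    _ensure_no_port_binding_failure(port)
    return port


def _cleanup_created_ports(port_ids):
    # Placeholder for whatever cleanup runs before the re-raise.
    print("cleaning up ports:", port_ids)


def update_ports_for_instance(ports):
    created_port_ids = []
    for port in ports:
        try:
            created_port_ids.append(_update_port(port)['id'])
        except Exception:
            # save_and_reraise_exception is entered while the original
            # exception is being handled: its body runs cleanup, then
            # __exit__ calls force_reraise(), the frame seen in the log.
            with excutils.save_and_reraise_exception():
                _cleanup_created_ports(created_port_ids)
    return created_port_ids


if __name__ == '__main__':
    try:
        update_ports_for_instance(
            [{'id': '3dfed7bc-259f-44c5-a77f-4ce1f09e11cb',
              'binding:vif_type': 'binding_failed'}])
    except PortBindingFailed as exc:
        print(exc)
```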
[ 681.950441] env[62585]: ERROR nova.compute.manager [ 681.950441] env[62585]: Traceback (most recent call last): [ 681.950441] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 681.950441] env[62585]: listener.cb(fileno) [ 681.950441] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 681.950441] env[62585]: result = function(*args, **kwargs) [ 681.950441] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 681.950441] env[62585]: return func(*args, **kwargs) [ 681.950441] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 681.950441] env[62585]: raise e [ 681.950441] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 681.950441] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 681.950441] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 681.950441] env[62585]: created_port_ids = self._update_ports_for_instance( [ 681.950441] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 681.950441] env[62585]: with excutils.save_and_reraise_exception(): [ 681.950441] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.950441] env[62585]: self.force_reraise() [ 681.950441] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.950441] env[62585]: raise self.value [ 681.950441] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 681.950441] env[62585]: updated_port = self._update_port( [ 681.950441] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.950441] env[62585]: _ensure_no_port_binding_failure(port) [ 681.950441] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 681.950441] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 681.951190] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 3dfed7bc-259f-44c5-a77f-4ce1f09e11cb, please check neutron logs for more information. [ 681.951190] env[62585]: Removing descriptor: 15 [ 681.951190] env[62585]: ERROR nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3dfed7bc-259f-44c5-a77f-4ce1f09e11cb, please check neutron logs for more information. 
[ 681.951190] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Traceback (most recent call last): [ 681.951190] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 681.951190] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] yield resources [ 681.951190] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 681.951190] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] self.driver.spawn(context, instance, image_meta, [ 681.951190] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 681.951190] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] self._vmops.spawn(context, instance, image_meta, injected_files, [ 681.951190] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 681.951190] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] vm_ref = self.build_virtual_machine(instance, [ 681.951885] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 681.951885] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] vif_infos = vmwarevif.get_vif_info(self._session, [ 681.951885] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 681.951885] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] for vif in network_info: [ 681.951885] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 681.951885] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] return self._sync_wrapper(fn, *args, **kwargs) [ 681.951885] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 681.951885] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] self.wait() [ 681.951885] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 681.951885] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] self[:] = self._gt.wait() [ 681.951885] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 681.951885] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] return self._exit_event.wait() [ 681.951885] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 681.952279] env[62585]: ERROR 
nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] result = hub.switch() [ 681.952279] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 681.952279] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] return self.greenlet.switch() [ 681.952279] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 681.952279] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] result = function(*args, **kwargs) [ 681.952279] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 681.952279] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] return func(*args, **kwargs) [ 681.952279] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 681.952279] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] raise e [ 681.952279] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 681.952279] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] nwinfo = self.network_api.allocate_for_instance( [ 681.952279] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 681.952279] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] created_port_ids = self._update_ports_for_instance( [ 681.952666] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 681.952666] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] with excutils.save_and_reraise_exception(): [ 681.952666] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.952666] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] self.force_reraise() [ 681.952666] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.952666] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] raise self.value [ 681.952666] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 681.952666] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] updated_port = self._update_port( [ 681.952666] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.952666] 
env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] _ensure_no_port_binding_failure(port) [ 681.952666] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 681.952666] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] raise exception.PortBindingFailed(port_id=port['id']) [ 681.953131] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] nova.exception.PortBindingFailed: Binding failed for port 3dfed7bc-259f-44c5-a77f-4ce1f09e11cb, please check neutron logs for more information. [ 681.953131] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] [ 681.953131] env[62585]: INFO nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Terminating instance [ 681.953226] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Acquiring lock "refresh_cache-ae66c3e2-eac8-4239-b5be-64dc0dcf2c04" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.185009] env[62585]: DEBUG nova.scheduler.client.report [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 682.192822] env[62585]: DEBUG nova.network.neutron [req-2ea26218-051b-4aaa-844e-badab2a77cee req-c6c0354b-9390-4fe5-b3b2-9abee2a06e37 service nova] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.321162] env[62585]: DEBUG nova.network.neutron [req-2ea26218-051b-4aaa-844e-badab2a77cee req-c6c0354b-9390-4fe5-b3b2-9abee2a06e37 service nova] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.693878] env[62585]: DEBUG oslo_concurrency.lockutils [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.002s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.697183] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.991s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.699082] env[62585]: INFO nova.compute.claims [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 682.718728] env[62585]: INFO nova.scheduler.client.report [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Deleted allocations for instance 2cf0927d-8d98-4554-92ce-c049e1ea179c [ 682.787931] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Acquiring lock "66af981d-2fa4-4ef4-ac39-3f8f78c543af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.788351] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Lock "66af981d-2fa4-4ef4-ac39-3f8f78c543af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.825026] env[62585]: DEBUG oslo_concurrency.lockutils [req-2ea26218-051b-4aaa-844e-badab2a77cee req-c6c0354b-9390-4fe5-b3b2-9abee2a06e37 service nova] Releasing lock "refresh_cache-ae66c3e2-eac8-4239-b5be-64dc0dcf2c04" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.825305] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Acquired lock "refresh_cache-ae66c3e2-eac8-4239-b5be-64dc0dcf2c04" {{(pid=62585) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.825639] env[62585]: DEBUG nova.network.neutron [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 683.228573] env[62585]: DEBUG oslo_concurrency.lockutils [None req-487fc4f3-677c-4014-9ffe-aed88e4b27fa tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "2cf0927d-8d98-4554-92ce-c049e1ea179c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.336s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.347957] env[62585]: DEBUG nova.network.neutron [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 683.432700] env[62585]: DEBUG nova.network.neutron [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.583174] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "16f01d66-44f8-4912-989a-48c39f667c95" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.583445] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "16f01d66-44f8-4912-989a-48c39f667c95" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.583651] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "16f01d66-44f8-4912-989a-48c39f667c95-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.583828] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "16f01d66-44f8-4912-989a-48c39f667c95-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.583990] env[62585]: DEBUG oslo_concurrency.lockutils [None 
req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "16f01d66-44f8-4912-989a-48c39f667c95-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.591760] env[62585]: INFO nova.compute.manager [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Terminating instance [ 683.593471] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "refresh_cache-16f01d66-44f8-4912-989a-48c39f667c95" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.593622] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquired lock "refresh_cache-16f01d66-44f8-4912-989a-48c39f667c95" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.593781] env[62585]: DEBUG nova.network.neutron [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 683.678904] env[62585]: DEBUG nova.compute.manager [req-065cd094-04a9-4326-91c5-9ebef710d45c req-aa76a3eb-47c1-49bb-befb-964b98b0955b service nova] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Received event network-vif-deleted-3dfed7bc-259f-44c5-a77f-4ce1f09e11cb {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 683.936912] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Releasing lock "refresh_cache-ae66c3e2-eac8-4239-b5be-64dc0dcf2c04" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.937734] env[62585]: DEBUG nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 683.938118] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 683.938700] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ca41983-4bba-4f10-8795-58aa7b2d5067 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.948546] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f673da-6f5f-4115-b5f4-d96c6c87d439 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.975186] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ae66c3e2-eac8-4239-b5be-64dc0dcf2c04 could not be found. [ 683.975544] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 683.975876] env[62585]: INFO nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Took 0.04 seconds to destroy the instance on the hypervisor. [ 683.980091] env[62585]: DEBUG oslo.service.loopingcall [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 683.980091] env[62585]: DEBUG nova.compute.manager [-] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 683.980091] env[62585]: DEBUG nova.network.neutron [-] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 683.995759] env[62585]: DEBUG nova.network.neutron [-] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.047255] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a36043-0ae4-423d-a3c7-8e1c6818d576 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.056017] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3ef60b-b265-4b31-b7ff-9c11894bd768 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.090680] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a39b90-ad6b-4a48-ad4c-879d1c8ad0a3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.100014] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7082fab-952f-4750-b30a-95198c72fbc4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.114048] env[62585]: DEBUG nova.compute.provider_tree [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.117219] env[62585]: DEBUG nova.network.neutron [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.164057] env[62585]: DEBUG nova.network.neutron [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.498841] env[62585]: DEBUG nova.network.neutron [-] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.620579] env[62585]: DEBUG nova.scheduler.client.report [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 684.666957] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Releasing lock "refresh_cache-16f01d66-44f8-4912-989a-48c39f667c95" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.667306] env[62585]: DEBUG nova.compute.manager [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 684.667511] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 684.668384] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cded8776-ef77-447d-abae-ecb187d4ee1e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.676850] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 684.677118] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1471ca5b-5aee-42c0-9b35-fc12ee26a5ab {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.682847] env[62585]: DEBUG oslo_vmware.api [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 684.682847] env[62585]: value = "task-1384690" [ 684.682847] env[62585]: _type = "Task" [ 684.682847] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.692026] env[62585]: DEBUG oslo_vmware.api [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384690, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.001827] env[62585]: INFO nova.compute.manager [-] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Took 1.02 seconds to deallocate network for instance. 
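The `Waiting for the task` / `_poll_task` records around this point (task-1384690, `PowerOffVM_Task`, progress 0% and then `completed successfully` with a `duration_secs` a few records later) reflect fixed-interval polling of a vCenter task object. A rough, self-contained sketch of that polling loop using oslo.service's `FixedIntervalLoopingCall` is below; `FakeTask` and the 0.5 s interval are assumptions for illustration, not oslo.vmware's actual internals:

```python
# Rough sketch of fixed-interval task polling, in the spirit of the
# wait_for_task/_poll_task records above. FakeTask and the interval are
# illustrative assumptions; this is not the oslo.vmware implementation.
import time

from oslo_service import loopingcall


class FakeTask(object):
    """Stand-in for a vCenter task handle that finishes after a delay."""

    def __init__(self, duration_secs):
        self._deadline = time.time() + duration_secs

    def progress(self):
        return 100 if self.done() else 0

    def done(self):
        return time.time() >= self._deadline


def wait_for_task(task, interval=0.5):
    """Poll `task` until it completes, then return its final progress."""

    def _poll():
        pct = task.progress()
        print("progress is %d%%" % pct)
        if task.done():
            # LoopingCallDone stops the loop and carries the return value.
            raise loopingcall.LoopingCallDone(pct)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()


if __name__ == '__main__':
    print("completed with progress:",
          wait_for_task(FakeTask(duration_secs=1.2)))
```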
[ 685.004226] env[62585]: DEBUG nova.compute.claims [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 685.004422] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.125761] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.126819] env[62585]: DEBUG nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 685.129256] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.501s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.130781] env[62585]: INFO nova.compute.claims [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 685.195557] env[62585]: DEBUG oslo_vmware.api [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384690, 'name': PowerOffVM_Task, 'duration_secs': 0.200218} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.195900] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 685.196143] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 685.196437] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7fd0de58-60c0-465d-af4f-2245e8f9e2a2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.223522] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 685.223764] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 685.223940] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Deleting the datastore file [datastore1] 16f01d66-44f8-4912-989a-48c39f667c95 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 685.224224] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b8f0207-874a-41ad-b2d9-84327d958759 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.232147] env[62585]: DEBUG oslo_vmware.api [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for the task: (returnval){ [ 685.232147] env[62585]: value = "task-1384692" [ 685.232147] env[62585]: _type = "Task" [ 685.232147] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.242597] env[62585]: DEBUG oslo_vmware.api [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384692, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.572061] env[62585]: DEBUG oslo_concurrency.lockutils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "c6f0ee10-c5cc-41ad-8b81-f7644921845b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.572280] env[62585]: DEBUG oslo_concurrency.lockutils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "c6f0ee10-c5cc-41ad-8b81-f7644921845b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.635682] env[62585]: DEBUG nova.compute.utils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 685.637165] env[62585]: DEBUG nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 685.637343] env[62585]: DEBUG nova.network.neutron [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 685.697460] env[62585]: DEBUG nova.policy [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3a2a6c12da146c9a660a4df19851534', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14248ed12c6146a39f4b542c5045db37', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 685.746947] env[62585]: DEBUG oslo_vmware.api [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Task: {'id': task-1384692, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099923} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.747264] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 685.747486] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 685.747685] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 685.747860] env[62585]: INFO nova.compute.manager [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Took 1.08 seconds to destroy the instance on the hypervisor. [ 685.748119] env[62585]: DEBUG oslo.service.loopingcall [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 685.748312] env[62585]: DEBUG nova.compute.manager [-] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 685.748548] env[62585]: DEBUG nova.network.neutron [-] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 685.763241] env[62585]: DEBUG nova.network.neutron [-] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 685.965039] env[62585]: DEBUG nova.network.neutron [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Successfully created port: cc86cd95-3cb7-47e1-a436-ae6433f69748 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 686.143211] env[62585]: DEBUG nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 686.268938] env[62585]: DEBUG nova.network.neutron [-] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.579349] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa0d963-a54b-4a70-aa22-ab82785c2298 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.587696] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9919289c-0d43-4f19-8bf7-b4b59fc1462f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.619913] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f82969-b527-4f30-87af-9707117f8bea {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.628678] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003eec33-348d-4e12-9550-2cf77b768266 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.642969] env[62585]: DEBUG nova.compute.provider_tree [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.765148] env[62585]: DEBUG nova.compute.manager [req-a994782f-ae88-45ee-a8b7-e8ad960d47bd req-1d877472-9083-4c2b-9785-190feb019486 service nova] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Received event network-changed-cc86cd95-3cb7-47e1-a436-ae6433f69748 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 686.765148] env[62585]: DEBUG nova.compute.manager [req-a994782f-ae88-45ee-a8b7-e8ad960d47bd req-1d877472-9083-4c2b-9785-190feb019486 service nova] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Refreshing instance network info cache due to event network-changed-cc86cd95-3cb7-47e1-a436-ae6433f69748. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 686.765148] env[62585]: DEBUG oslo_concurrency.lockutils [req-a994782f-ae88-45ee-a8b7-e8ad960d47bd req-1d877472-9083-4c2b-9785-190feb019486 service nova] Acquiring lock "refresh_cache-0b4d919f-552e-489e-bcfb-f6447cf81fb8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.765148] env[62585]: DEBUG oslo_concurrency.lockutils [req-a994782f-ae88-45ee-a8b7-e8ad960d47bd req-1d877472-9083-4c2b-9785-190feb019486 service nova] Acquired lock "refresh_cache-0b4d919f-552e-489e-bcfb-f6447cf81fb8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.765148] env[62585]: DEBUG nova.network.neutron [req-a994782f-ae88-45ee-a8b7-e8ad960d47bd req-1d877472-9083-4c2b-9785-190feb019486 service nova] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Refreshing network info cache for port cc86cd95-3cb7-47e1-a436-ae6433f69748 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 686.772628] env[62585]: INFO nova.compute.manager [-] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Took 1.02 seconds to deallocate network for instance. [ 687.120063] env[62585]: ERROR nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cc86cd95-3cb7-47e1-a436-ae6433f69748, please check neutron logs for more information. [ 687.120063] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 687.120063] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 687.120063] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 687.120063] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 687.120063] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 687.120063] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 687.120063] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 687.120063] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 687.120063] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 687.120063] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 687.120063] env[62585]: ERROR nova.compute.manager raise self.value [ 687.120063] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 687.120063] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 687.120063] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 687.120063] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 687.120562] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
687.120562] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 687.120562] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cc86cd95-3cb7-47e1-a436-ae6433f69748, please check neutron logs for more information. [ 687.120562] env[62585]: ERROR nova.compute.manager [ 687.120562] env[62585]: Traceback (most recent call last): [ 687.120562] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 687.120562] env[62585]: listener.cb(fileno) [ 687.120562] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 687.120562] env[62585]: result = function(*args, **kwargs) [ 687.120562] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 687.120562] env[62585]: return func(*args, **kwargs) [ 687.120562] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 687.120562] env[62585]: raise e [ 687.120562] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 687.120562] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 687.120562] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 687.120562] env[62585]: created_port_ids = self._update_ports_for_instance( [ 687.120562] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 687.120562] env[62585]: with excutils.save_and_reraise_exception(): [ 687.120562] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 687.120562] env[62585]: self.force_reraise() [ 687.120562] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 687.120562] env[62585]: raise self.value [ 687.120562] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 687.120562] env[62585]: updated_port = self._update_port( [ 687.120562] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 687.120562] env[62585]: _ensure_no_port_binding_failure(port) [ 687.120562] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 687.120562] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 687.121447] env[62585]: nova.exception.PortBindingFailed: Binding failed for port cc86cd95-3cb7-47e1-a436-ae6433f69748, please check neutron logs for more information. 
[ 687.121447] env[62585]: Removing descriptor: 15 [ 687.145486] env[62585]: DEBUG nova.scheduler.client.report [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 687.158077] env[62585]: DEBUG nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 687.182767] env[62585]: DEBUG nova.virt.hardware [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 687.183070] env[62585]: DEBUG nova.virt.hardware [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 687.183187] env[62585]: DEBUG nova.virt.hardware [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 687.183474] env[62585]: DEBUG nova.virt.hardware [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 687.184377] env[62585]: DEBUG nova.virt.hardware [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Image pref 0:0:0 
{{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 687.184377] env[62585]: DEBUG nova.virt.hardware [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 687.184377] env[62585]: DEBUG nova.virt.hardware [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 687.184377] env[62585]: DEBUG nova.virt.hardware [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 687.184377] env[62585]: DEBUG nova.virt.hardware [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 687.184779] env[62585]: DEBUG nova.virt.hardware [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 687.184779] env[62585]: DEBUG nova.virt.hardware [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 687.185485] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190aa351-505c-41f6-a57c-728af8bdae98 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.193355] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7dbfe0-dae8-4296-9292-64742b9976e1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.208166] env[62585]: ERROR nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cc86cd95-3cb7-47e1-a436-ae6433f69748, please check neutron logs for more information. 
[ 687.208166] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Traceback (most recent call last): [ 687.208166] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 687.208166] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] yield resources [ 687.208166] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 687.208166] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] self.driver.spawn(context, instance, image_meta, [ 687.208166] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 687.208166] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 687.208166] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 687.208166] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] vm_ref = self.build_virtual_machine(instance, [ 687.208166] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 687.208613] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] vif_infos = vmwarevif.get_vif_info(self._session, [ 687.208613] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 687.208613] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] for vif in network_info: [ 687.208613] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 687.208613] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] return self._sync_wrapper(fn, *args, **kwargs) [ 687.208613] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 687.208613] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] self.wait() [ 687.208613] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 687.208613] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] self[:] = self._gt.wait() [ 687.208613] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 687.208613] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] return self._exit_event.wait() [ 687.208613] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 687.208613] env[62585]: ERROR 
nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] current.throw(*self._exc) [ 687.208970] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 687.208970] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] result = function(*args, **kwargs) [ 687.208970] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 687.208970] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] return func(*args, **kwargs) [ 687.208970] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 687.208970] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] raise e [ 687.208970] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 687.208970] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] nwinfo = self.network_api.allocate_for_instance( [ 687.208970] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 687.208970] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] created_port_ids = self._update_ports_for_instance( [ 687.208970] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 687.208970] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] with excutils.save_and_reraise_exception(): [ 687.208970] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 687.209310] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] self.force_reraise() [ 687.209310] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 687.209310] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] raise self.value [ 687.209310] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 687.209310] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] updated_port = self._update_port( [ 687.209310] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 687.209310] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] _ensure_no_port_binding_failure(port) [ 687.209310] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
687.209310] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] raise exception.PortBindingFailed(port_id=port['id']) [ 687.209310] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] nova.exception.PortBindingFailed: Binding failed for port cc86cd95-3cb7-47e1-a436-ae6433f69748, please check neutron logs for more information. [ 687.209310] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] [ 687.209310] env[62585]: INFO nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Terminating instance [ 687.210207] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Acquiring lock "refresh_cache-0b4d919f-552e-489e-bcfb-f6447cf81fb8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.277786] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.283659] env[62585]: DEBUG nova.network.neutron [req-a994782f-ae88-45ee-a8b7-e8ad960d47bd req-1d877472-9083-4c2b-9785-190feb019486 service nova] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 687.363499] env[62585]: DEBUG nova.network.neutron [req-a994782f-ae88-45ee-a8b7-e8ad960d47bd req-1d877472-9083-4c2b-9785-190feb019486 service nova] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.650696] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.521s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.651232] env[62585]: DEBUG nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 687.653721] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.133s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.655076] env[62585]: INFO nova.compute.claims [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 687.866850] env[62585]: DEBUG oslo_concurrency.lockutils [req-a994782f-ae88-45ee-a8b7-e8ad960d47bd req-1d877472-9083-4c2b-9785-190feb019486 service nova] Releasing lock "refresh_cache-0b4d919f-552e-489e-bcfb-f6447cf81fb8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.867278] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Acquired lock "refresh_cache-0b4d919f-552e-489e-bcfb-f6447cf81fb8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.867467] env[62585]: DEBUG nova.network.neutron [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 688.158898] env[62585]: DEBUG nova.compute.utils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 688.161992] env[62585]: DEBUG nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 688.162176] env[62585]: DEBUG nova.network.neutron [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 688.221293] env[62585]: DEBUG nova.policy [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43281571ec1748caa50046e581adc491', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fe48bca1d1de43509f9073d4d0690794', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 688.387454] env[62585]: DEBUG nova.network.neutron [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.461106] env[62585]: DEBUG nova.network.neutron [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.542133] env[62585]: DEBUG nova.network.neutron [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Successfully created port: 9361b60c-f746-4c08-b38a-bf00ba2faa45 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 688.664249] env[62585]: DEBUG nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 688.830672] env[62585]: DEBUG nova.compute.manager [req-cff968f6-7003-412e-896c-323cf5e0dc55 req-a679af4a-60f6-4fd7-80ac-c800f2706df0 service nova] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Received event network-vif-deleted-cc86cd95-3cb7-47e1-a436-ae6433f69748 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 688.962648] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Releasing lock "refresh_cache-0b4d919f-552e-489e-bcfb-f6447cf81fb8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.963066] env[62585]: DEBUG nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 688.963264] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 688.963741] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bb58477c-e30e-401d-8b7b-40ed4172888a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.973209] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea37eb95-a2fa-4001-a021-88e12b1ce181 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.996460] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0b4d919f-552e-489e-bcfb-f6447cf81fb8 could not be found. [ 688.996687] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 688.996861] env[62585]: INFO nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 688.997102] env[62585]: DEBUG oslo.service.loopingcall [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 688.999763] env[62585]: DEBUG nova.compute.manager [-] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 688.999763] env[62585]: DEBUG nova.network.neutron [-] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 689.010205] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8211a7-c5c1-4f14-b0ad-8b1232c1c605 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.013509] env[62585]: DEBUG nova.network.neutron [-] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 689.017331] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9eacac6-f2fe-4037-abf6-67bd07c19162 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.048883] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4786dcbc-c88a-44a3-9bbc-9e99f246e19c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.056363] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7937b0ae-ed76-4a27-8d09-5308716df1b7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.069725] env[62585]: DEBUG nova.compute.provider_tree [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.495418] env[62585]: ERROR nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9361b60c-f746-4c08-b38a-bf00ba2faa45, please check neutron logs for more information. 
[ 689.495418] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 689.495418] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 689.495418] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 689.495418] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 689.495418] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 689.495418] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 689.495418] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 689.495418] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 689.495418] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 689.495418] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 689.495418] env[62585]: ERROR nova.compute.manager raise self.value [ 689.495418] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 689.495418] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 689.495418] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 689.495418] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 689.496121] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 689.496121] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 689.496121] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9361b60c-f746-4c08-b38a-bf00ba2faa45, please check neutron logs for more information. 
[ 689.496121] env[62585]: ERROR nova.compute.manager [ 689.496121] env[62585]: Traceback (most recent call last): [ 689.496121] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 689.496121] env[62585]: listener.cb(fileno) [ 689.496121] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 689.496121] env[62585]: result = function(*args, **kwargs) [ 689.496121] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 689.496121] env[62585]: return func(*args, **kwargs) [ 689.496121] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 689.496121] env[62585]: raise e [ 689.496121] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 689.496121] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 689.496121] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 689.496121] env[62585]: created_port_ids = self._update_ports_for_instance( [ 689.496121] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 689.496121] env[62585]: with excutils.save_and_reraise_exception(): [ 689.496121] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 689.496121] env[62585]: self.force_reraise() [ 689.496121] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 689.496121] env[62585]: raise self.value [ 689.496121] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 689.496121] env[62585]: updated_port = self._update_port( [ 689.496121] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 689.496121] env[62585]: _ensure_no_port_binding_failure(port) [ 689.496121] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 689.496121] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 689.496996] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 9361b60c-f746-4c08-b38a-bf00ba2faa45, please check neutron logs for more information. 
[ 689.496996] env[62585]: Removing descriptor: 15 [ 689.517391] env[62585]: DEBUG nova.network.neutron [-] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.572637] env[62585]: DEBUG nova.scheduler.client.report [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 689.676377] env[62585]: DEBUG nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 689.700237] env[62585]: DEBUG nova.virt.hardware [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 689.700495] env[62585]: DEBUG nova.virt.hardware [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 689.700648] env[62585]: DEBUG nova.virt.hardware [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 689.700827] env[62585]: DEBUG nova.virt.hardware [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 689.700968] env[62585]: DEBUG nova.virt.hardware [None 
req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 689.701133] env[62585]: DEBUG nova.virt.hardware [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 689.701337] env[62585]: DEBUG nova.virt.hardware [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 689.701492] env[62585]: DEBUG nova.virt.hardware [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 689.701655] env[62585]: DEBUG nova.virt.hardware [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 689.701822] env[62585]: DEBUG nova.virt.hardware [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 689.701999] env[62585]: DEBUG nova.virt.hardware [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 689.702844] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4048de4-2094-4013-993a-dfaff74a3416 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.710186] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6712c862-ce6f-44b5-a726-44dce0308ddb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.723839] env[62585]: ERROR nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9361b60c-f746-4c08-b38a-bf00ba2faa45, please check neutron logs for more information. 
[ 689.723839] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Traceback (most recent call last): [ 689.723839] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 689.723839] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] yield resources [ 689.723839] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 689.723839] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] self.driver.spawn(context, instance, image_meta, [ 689.723839] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 689.723839] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] self._vmops.spawn(context, instance, image_meta, injected_files, [ 689.723839] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 689.723839] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] vm_ref = self.build_virtual_machine(instance, [ 689.723839] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 689.724236] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] vif_infos = vmwarevif.get_vif_info(self._session, [ 689.724236] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 689.724236] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] for vif in network_info: [ 689.724236] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 689.724236] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] return self._sync_wrapper(fn, *args, **kwargs) [ 689.724236] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 689.724236] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] self.wait() [ 689.724236] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 689.724236] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] self[:] = self._gt.wait() [ 689.724236] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 689.724236] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] return self._exit_event.wait() [ 689.724236] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 689.724236] env[62585]: ERROR 
nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] current.throw(*self._exc) [ 689.724676] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 689.724676] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] result = function(*args, **kwargs) [ 689.724676] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 689.724676] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] return func(*args, **kwargs) [ 689.724676] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 689.724676] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] raise e [ 689.724676] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 689.724676] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] nwinfo = self.network_api.allocate_for_instance( [ 689.724676] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 689.724676] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] created_port_ids = self._update_ports_for_instance( [ 689.724676] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 689.724676] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] with excutils.save_and_reraise_exception(): [ 689.724676] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 689.725070] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] self.force_reraise() [ 689.725070] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 689.725070] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] raise self.value [ 689.725070] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 689.725070] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] updated_port = self._update_port( [ 689.725070] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 689.725070] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] _ensure_no_port_binding_failure(port) [ 689.725070] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
689.725070] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] raise exception.PortBindingFailed(port_id=port['id']) [ 689.725070] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] nova.exception.PortBindingFailed: Binding failed for port 9361b60c-f746-4c08-b38a-bf00ba2faa45, please check neutron logs for more information. [ 689.725070] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] [ 689.725070] env[62585]: INFO nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Terminating instance [ 689.726038] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Acquiring lock "refresh_cache-4557a853-232e-49e5-9052-ebf54d68e998" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 689.726198] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Acquired lock "refresh_cache-4557a853-232e-49e5-9052-ebf54d68e998" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.726361] env[62585]: DEBUG nova.network.neutron [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 690.020644] env[62585]: INFO nova.compute.manager [-] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Took 1.02 seconds to deallocate network for instance. 
[ 690.023020] env[62585]: DEBUG nova.compute.claims [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 690.023204] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.078082] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.423s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.078082] env[62585]: DEBUG nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 690.080481] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.928s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.244376] env[62585]: DEBUG nova.network.neutron [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 690.309715] env[62585]: DEBUG nova.network.neutron [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.586065] env[62585]: DEBUG nova.compute.utils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 690.589758] env[62585]: DEBUG nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 690.590016] env[62585]: DEBUG nova.network.neutron [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 690.639995] env[62585]: DEBUG nova.policy [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43281571ec1748caa50046e581adc491', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fe48bca1d1de43509f9073d4d0690794', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 690.812213] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Releasing lock "refresh_cache-4557a853-232e-49e5-9052-ebf54d68e998" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 690.812617] env[62585]: DEBUG nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 690.812806] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 690.813103] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4591391f-c93c-4599-81cc-db88070772b9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.828103] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949ce9ac-097f-4522-9c2a-d2f1875f8f9e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.854077] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4557a853-232e-49e5-9052-ebf54d68e998 could not be found. 
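Aside: the WARNING above ("Instance does not exist on backend: ... InstanceNotFound") is expected in this sequence: port binding failed before a VM was ever created on the hypervisor, so the destroy path treats a missing backend VM as already gone and continues with network deallocation and claim cleanup. A hedged sketch of that tolerance pattern follows; the class and function names are stand-ins for illustration, not Nova's real classes.

    # Illustrative sketch only; names are stand-ins, not Nova source.
    class InstanceNotFound(Exception):
        """Backend VM for the instance does not exist."""

    def destroy_on_hypervisor(find_vm_ref, delete_vm, instance_uuid):
        vm_ref = find_vm_ref(instance_uuid)
        if vm_ref is None:
            raise InstanceNotFound(instance_uuid)
        delete_vm(vm_ref)

    def shutdown_instance(find_vm_ref, delete_vm, instance_uuid, log):
        try:
            destroy_on_hypervisor(find_vm_ref, delete_vm, instance_uuid)
        except InstanceNotFound:
            # Spawn never got far enough to create a VM (the port binding failed),
            # so there is nothing to delete; warn and continue with teardown.
            log.warning("Instance %s does not exist on backend", instance_uuid)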
[ 690.854327] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 690.854533] env[62585]: INFO nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Took 0.04 seconds to destroy the instance on the hypervisor. [ 690.854805] env[62585]: DEBUG oslo.service.loopingcall [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 690.857335] env[62585]: DEBUG nova.compute.manager [-] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 690.857450] env[62585]: DEBUG nova.network.neutron [-] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 690.868244] env[62585]: DEBUG nova.compute.manager [req-0660f46d-21d8-44c5-8b44-5f2b00d81c00 req-99983031-1d9a-4598-b559-89d8a4a3eb61 service nova] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Received event network-changed-9361b60c-f746-4c08-b38a-bf00ba2faa45 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 690.868430] env[62585]: DEBUG nova.compute.manager [req-0660f46d-21d8-44c5-8b44-5f2b00d81c00 req-99983031-1d9a-4598-b559-89d8a4a3eb61 service nova] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Refreshing instance network info cache due to event network-changed-9361b60c-f746-4c08-b38a-bf00ba2faa45. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 690.868638] env[62585]: DEBUG oslo_concurrency.lockutils [req-0660f46d-21d8-44c5-8b44-5f2b00d81c00 req-99983031-1d9a-4598-b559-89d8a4a3eb61 service nova] Acquiring lock "refresh_cache-4557a853-232e-49e5-9052-ebf54d68e998" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.868777] env[62585]: DEBUG oslo_concurrency.lockutils [req-0660f46d-21d8-44c5-8b44-5f2b00d81c00 req-99983031-1d9a-4598-b559-89d8a4a3eb61 service nova] Acquired lock "refresh_cache-4557a853-232e-49e5-9052-ebf54d68e998" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.868929] env[62585]: DEBUG nova.network.neutron [req-0660f46d-21d8-44c5-8b44-5f2b00d81c00 req-99983031-1d9a-4598-b559-89d8a4a3eb61 service nova] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Refreshing network info cache for port 9361b60c-f746-4c08-b38a-bf00ba2faa45 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 690.880479] env[62585]: DEBUG nova.network.neutron [-] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 690.944013] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3ff71d-07ea-49f8-b857-2ef9cf9f65c3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.951785] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b2536f-a072-4441-8a08-e7eab05eb0d5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.984619] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081ee2b2-3b60-46e6-a78e-9439adb6fefa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.988484] env[62585]: DEBUG nova.network.neutron [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Successfully created port: e8a71e43-82b7-41db-871c-0cd81bf0c6a9 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 690.993213] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d91e06-9ba0-4f96-805a-4393b7f0bb08 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.005896] env[62585]: DEBUG nova.compute.provider_tree [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.090598] env[62585]: DEBUG nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 691.384404] env[62585]: DEBUG nova.network.neutron [-] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.388030] env[62585]: DEBUG nova.network.neutron [req-0660f46d-21d8-44c5-8b44-5f2b00d81c00 req-99983031-1d9a-4598-b559-89d8a4a3eb61 service nova] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 691.452067] env[62585]: DEBUG nova.network.neutron [req-0660f46d-21d8-44c5-8b44-5f2b00d81c00 req-99983031-1d9a-4598-b559-89d8a4a3eb61 service nova] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.511970] env[62585]: DEBUG nova.scheduler.client.report [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 691.891077] env[62585]: INFO nova.compute.manager [-] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Took 1.03 seconds to deallocate network for instance. [ 691.897582] env[62585]: DEBUG nova.compute.claims [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 691.897791] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.900029] env[62585]: ERROR nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e8a71e43-82b7-41db-871c-0cd81bf0c6a9, please check neutron logs for more information. 
[ 691.900029] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 691.900029] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 691.900029] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 691.900029] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 691.900029] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 691.900029] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 691.900029] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 691.900029] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.900029] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 691.900029] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.900029] env[62585]: ERROR nova.compute.manager raise self.value [ 691.900029] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 691.900029] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 691.900029] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.900029] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 691.900513] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 691.900513] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 691.900513] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e8a71e43-82b7-41db-871c-0cd81bf0c6a9, please check neutron logs for more information. 
[ 691.900513] env[62585]: ERROR nova.compute.manager [ 691.900513] env[62585]: Traceback (most recent call last): [ 691.900513] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 691.900513] env[62585]: listener.cb(fileno) [ 691.900513] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 691.900513] env[62585]: result = function(*args, **kwargs) [ 691.900513] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 691.900513] env[62585]: return func(*args, **kwargs) [ 691.900513] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 691.900513] env[62585]: raise e [ 691.900513] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 691.900513] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 691.900513] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 691.900513] env[62585]: created_port_ids = self._update_ports_for_instance( [ 691.900513] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 691.900513] env[62585]: with excutils.save_and_reraise_exception(): [ 691.900513] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.900513] env[62585]: self.force_reraise() [ 691.900513] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.900513] env[62585]: raise self.value [ 691.900513] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 691.900513] env[62585]: updated_port = self._update_port( [ 691.900513] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.900513] env[62585]: _ensure_no_port_binding_failure(port) [ 691.900513] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 691.900513] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 691.901334] env[62585]: nova.exception.PortBindingFailed: Binding failed for port e8a71e43-82b7-41db-871c-0cd81bf0c6a9, please check neutron logs for more information. 
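Aside: every PortBindingFailed in this section is raised by the same check at the bottom of the tracebacks above (_ensure_no_port_binding_failure): after a port update, the port's binding:vif_type is inspected, and a value of "binding_failed" means Neutron could not bind the port on the target host. A simplified, illustrative sketch of that check (not the Nova source):

    VIF_TYPE_BINDING_FAILED = "binding_failed"  # value Neutron reports for a failed binding

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check "
                             "neutron logs for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # 'port' is the dict returned by the Neutron ports API.
        if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port["id"])

When this is raised during a build, the compute manager aborts the resource claim and re-schedules the instance, as the "was re-scheduled: Binding failed for port ..." entry later in this section shows.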
[ 691.901334] env[62585]: Removing descriptor: 15 [ 691.958874] env[62585]: DEBUG oslo_concurrency.lockutils [req-0660f46d-21d8-44c5-8b44-5f2b00d81c00 req-99983031-1d9a-4598-b559-89d8a4a3eb61 service nova] Releasing lock "refresh_cache-4557a853-232e-49e5-9052-ebf54d68e998" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.960063] env[62585]: DEBUG nova.compute.manager [req-0660f46d-21d8-44c5-8b44-5f2b00d81c00 req-99983031-1d9a-4598-b559-89d8a4a3eb61 service nova] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Received event network-vif-deleted-9361b60c-f746-4c08-b38a-bf00ba2faa45 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 692.017485] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.937s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.018141] env[62585]: ERROR nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 531674da-2207-4a18-93cd-5279aed15d9c, please check neutron logs for more information. [ 692.018141] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Traceback (most recent call last): [ 692.018141] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 692.018141] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] self.driver.spawn(context, instance, image_meta, [ 692.018141] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 692.018141] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 692.018141] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 692.018141] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] vm_ref = self.build_virtual_machine(instance, [ 692.018141] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 692.018141] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] vif_infos = vmwarevif.get_vif_info(self._session, [ 692.018141] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 692.018448] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] for vif in network_info: [ 692.018448] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 692.018448] 
env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] return self._sync_wrapper(fn, *args, **kwargs) [ 692.018448] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 692.018448] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] self.wait() [ 692.018448] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 692.018448] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] self[:] = self._gt.wait() [ 692.018448] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 692.018448] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] return self._exit_event.wait() [ 692.018448] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 692.018448] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] result = hub.switch() [ 692.018448] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 692.018448] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] return self.greenlet.switch() [ 692.018994] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 692.018994] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] result = function(*args, **kwargs) [ 692.018994] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 692.018994] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] return func(*args, **kwargs) [ 692.018994] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 692.018994] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] raise e [ 692.018994] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 692.018994] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] nwinfo = self.network_api.allocate_for_instance( [ 692.018994] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 692.018994] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] created_port_ids = self._update_ports_for_instance( [ 692.018994] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 692.018994] env[62585]: ERROR 
nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] with excutils.save_and_reraise_exception(): [ 692.018994] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 692.019548] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] self.force_reraise() [ 692.019548] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 692.019548] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] raise self.value [ 692.019548] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 692.019548] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] updated_port = self._update_port( [ 692.019548] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 692.019548] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] _ensure_no_port_binding_failure(port) [ 692.019548] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 692.019548] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] raise exception.PortBindingFailed(port_id=port['id']) [ 692.019548] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] nova.exception.PortBindingFailed: Binding failed for port 531674da-2207-4a18-93cd-5279aed15d9c, please check neutron logs for more information. [ 692.019548] env[62585]: ERROR nova.compute.manager [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] [ 692.019999] env[62585]: DEBUG nova.compute.utils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Binding failed for port 531674da-2207-4a18-93cd-5279aed15d9c, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 692.020154] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.006s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.023030] env[62585]: DEBUG nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Build of instance 3b50dbde-2969-4a4b-ae35-42416342a60b was re-scheduled: Binding failed for port 531674da-2207-4a18-93cd-5279aed15d9c, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 692.023366] env[62585]: DEBUG nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 692.023581] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Acquiring lock "refresh_cache-3b50dbde-2969-4a4b-ae35-42416342a60b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.023727] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Acquired lock "refresh_cache-3b50dbde-2969-4a4b-ae35-42416342a60b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.023882] env[62585]: DEBUG nova.network.neutron [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.102023] env[62585]: DEBUG nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 692.125988] env[62585]: DEBUG nova.virt.hardware [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 692.126254] env[62585]: DEBUG nova.virt.hardware [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 692.126477] env[62585]: DEBUG nova.virt.hardware [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 692.126769] env[62585]: DEBUG nova.virt.hardware [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 692.126926] env[62585]: DEBUG nova.virt.hardware [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 692.127087] env[62585]: DEBUG nova.virt.hardware [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 692.127289] env[62585]: DEBUG nova.virt.hardware [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 692.127440] env[62585]: DEBUG nova.virt.hardware [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 692.127599] 
env[62585]: DEBUG nova.virt.hardware [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 692.127752] env[62585]: DEBUG nova.virt.hardware [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 692.127916] env[62585]: DEBUG nova.virt.hardware [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 692.128753] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3594775f-515a-40bb-a985-daad12785717 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.136615] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497106a8-1e5e-44b8-b55d-4f12bbc118dd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.151043] env[62585]: ERROR nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e8a71e43-82b7-41db-871c-0cd81bf0c6a9, please check neutron logs for more information. 
[ 692.151043] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Traceback (most recent call last): [ 692.151043] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 692.151043] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] yield resources [ 692.151043] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 692.151043] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] self.driver.spawn(context, instance, image_meta, [ 692.151043] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 692.151043] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 692.151043] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 692.151043] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] vm_ref = self.build_virtual_machine(instance, [ 692.151043] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 692.151392] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] vif_infos = vmwarevif.get_vif_info(self._session, [ 692.151392] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 692.151392] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] for vif in network_info: [ 692.151392] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 692.151392] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] return self._sync_wrapper(fn, *args, **kwargs) [ 692.151392] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 692.151392] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] self.wait() [ 692.151392] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 692.151392] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] self[:] = self._gt.wait() [ 692.151392] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 692.151392] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] return self._exit_event.wait() [ 692.151392] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 692.151392] env[62585]: ERROR 
nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] current.throw(*self._exc) [ 692.151767] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 692.151767] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] result = function(*args, **kwargs) [ 692.151767] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 692.151767] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] return func(*args, **kwargs) [ 692.151767] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 692.151767] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] raise e [ 692.151767] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 692.151767] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] nwinfo = self.network_api.allocate_for_instance( [ 692.151767] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 692.151767] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] created_port_ids = self._update_ports_for_instance( [ 692.151767] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 692.151767] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] with excutils.save_and_reraise_exception(): [ 692.151767] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 692.152170] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] self.force_reraise() [ 692.152170] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 692.152170] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] raise self.value [ 692.152170] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 692.152170] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] updated_port = self._update_port( [ 692.152170] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 692.152170] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] _ensure_no_port_binding_failure(port) [ 692.152170] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
692.152170] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] raise exception.PortBindingFailed(port_id=port['id']) [ 692.152170] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] nova.exception.PortBindingFailed: Binding failed for port e8a71e43-82b7-41db-871c-0cd81bf0c6a9, please check neutron logs for more information. [ 692.152170] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] [ 692.152170] env[62585]: INFO nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Terminating instance [ 692.153163] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Acquiring lock "refresh_cache-72cdccb7-b398-4833-af82-d64222c83f8d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.153320] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Acquired lock "refresh_cache-72cdccb7-b398-4833-af82-d64222c83f8d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.153477] env[62585]: DEBUG nova.network.neutron [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.543541] env[62585]: DEBUG nova.network.neutron [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.596413] env[62585]: DEBUG nova.network.neutron [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.670548] env[62585]: DEBUG nova.network.neutron [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.726491] env[62585]: DEBUG nova.network.neutron [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.801058] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d39d93-e827-4bce-809d-0dfe676fad70 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.809597] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc555395-d51c-44db-a349-9d85021bf592 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.839736] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a5feea-73d2-428a-a334-5fa3966623e5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.846706] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15d4cba-0b2f-4200-b5ea-6c59e810e0ab {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.859461] env[62585]: DEBUG nova.compute.provider_tree [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.900611] env[62585]: DEBUG nova.compute.manager [req-7424d546-f5a6-4cb2-8971-dc40e29778d3 req-c7a32696-589a-4388-ac3c-9f13d2e5a2a1 service nova] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Received event network-changed-e8a71e43-82b7-41db-871c-0cd81bf0c6a9 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 692.900611] env[62585]: DEBUG nova.compute.manager [req-7424d546-f5a6-4cb2-8971-dc40e29778d3 req-c7a32696-589a-4388-ac3c-9f13d2e5a2a1 service nova] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Refreshing instance network info cache due to event network-changed-e8a71e43-82b7-41db-871c-0cd81bf0c6a9. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 692.900611] env[62585]: DEBUG oslo_concurrency.lockutils [req-7424d546-f5a6-4cb2-8971-dc40e29778d3 req-c7a32696-589a-4388-ac3c-9f13d2e5a2a1 service nova] Acquiring lock "refresh_cache-72cdccb7-b398-4833-af82-d64222c83f8d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.099603] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Releasing lock "refresh_cache-3b50dbde-2969-4a4b-ae35-42416342a60b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.099882] env[62585]: DEBUG nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 693.100087] env[62585]: DEBUG nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 693.100280] env[62585]: DEBUG nova.network.neutron [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 693.115113] env[62585]: DEBUG nova.network.neutron [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.229364] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Releasing lock "refresh_cache-72cdccb7-b398-4833-af82-d64222c83f8d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.229935] env[62585]: DEBUG nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 693.230222] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 693.230626] env[62585]: DEBUG oslo_concurrency.lockutils [req-7424d546-f5a6-4cb2-8971-dc40e29778d3 req-c7a32696-589a-4388-ac3c-9f13d2e5a2a1 service nova] Acquired lock "refresh_cache-72cdccb7-b398-4833-af82-d64222c83f8d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.230871] env[62585]: DEBUG nova.network.neutron [req-7424d546-f5a6-4cb2-8971-dc40e29778d3 req-c7a32696-589a-4388-ac3c-9f13d2e5a2a1 service nova] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Refreshing network info cache for port e8a71e43-82b7-41db-871c-0cd81bf0c6a9 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 693.232219] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-03489a0b-f77a-430e-a388-48c5081ea0e8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.241854] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c03777-95d1-4e35-b8e8-72e67ba9e708 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.263808] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 72cdccb7-b398-4833-af82-d64222c83f8d could not be found. [ 693.263975] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 693.264162] env[62585]: INFO nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Took 0.03 seconds to destroy the instance on the hypervisor. [ 693.264396] env[62585]: DEBUG oslo.service.loopingcall [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 693.264604] env[62585]: DEBUG nova.compute.manager [-] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 693.264699] env[62585]: DEBUG nova.network.neutron [-] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 693.280196] env[62585]: DEBUG nova.network.neutron [-] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.362436] env[62585]: DEBUG nova.scheduler.client.report [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 693.618280] env[62585]: DEBUG nova.network.neutron [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.751066] env[62585]: DEBUG nova.network.neutron [req-7424d546-f5a6-4cb2-8971-dc40e29778d3 req-c7a32696-589a-4388-ac3c-9f13d2e5a2a1 service nova] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.782218] env[62585]: DEBUG nova.network.neutron [-] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.805587] env[62585]: DEBUG nova.network.neutron [req-7424d546-f5a6-4cb2-8971-dc40e29778d3 req-c7a32696-589a-4388-ac3c-9f13d2e5a2a1 service nova] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.868249] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.848s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.868951] env[62585]: ERROR nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 02f0d99b-ed9d-4b15-82e5-aa932eb0287d, please check neutron logs for more information. [ 693.868951] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Traceback (most recent call last): [ 693.868951] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 693.868951] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] self.driver.spawn(context, instance, image_meta, [ 693.868951] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 693.868951] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] self._vmops.spawn(context, instance, image_meta, injected_files, [ 693.868951] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 693.868951] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] vm_ref = self.build_virtual_machine(instance, [ 693.868951] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 693.868951] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] vif_infos = vmwarevif.get_vif_info(self._session, [ 693.868951] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 693.869360] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] for vif in network_info: [ 693.869360] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 693.869360] env[62585]: ERROR 
nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] return self._sync_wrapper(fn, *args, **kwargs) [ 693.869360] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 693.869360] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] self.wait() [ 693.869360] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 693.869360] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] self[:] = self._gt.wait() [ 693.869360] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 693.869360] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] return self._exit_event.wait() [ 693.869360] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 693.869360] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] result = hub.switch() [ 693.869360] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 693.869360] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] return self.greenlet.switch() [ 693.869835] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 693.869835] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] result = function(*args, **kwargs) [ 693.869835] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 693.869835] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] return func(*args, **kwargs) [ 693.869835] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 693.869835] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] raise e [ 693.869835] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 693.869835] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] nwinfo = self.network_api.allocate_for_instance( [ 693.869835] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 693.869835] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] created_port_ids = self._update_ports_for_instance( [ 693.869835] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 693.869835] env[62585]: ERROR nova.compute.manager 
[instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] with excutils.save_and_reraise_exception(): [ 693.869835] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 693.870267] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] self.force_reraise() [ 693.870267] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 693.870267] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] raise self.value [ 693.870267] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 693.870267] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] updated_port = self._update_port( [ 693.870267] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 693.870267] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] _ensure_no_port_binding_failure(port) [ 693.870267] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 693.870267] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] raise exception.PortBindingFailed(port_id=port['id']) [ 693.870267] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] nova.exception.PortBindingFailed: Binding failed for port 02f0d99b-ed9d-4b15-82e5-aa932eb0287d, please check neutron logs for more information. [ 693.870267] env[62585]: ERROR nova.compute.manager [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] [ 693.870631] env[62585]: DEBUG nova.compute.utils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Binding failed for port 02f0d99b-ed9d-4b15-82e5-aa932eb0287d, please check neutron logs for more information. 
{{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 693.871376] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.397s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.873330] env[62585]: INFO nova.compute.claims [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 693.876269] env[62585]: DEBUG nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Build of instance 1cad8d1b-ed02-424c-879c-2f23d4d90b22 was re-scheduled: Binding failed for port 02f0d99b-ed9d-4b15-82e5-aa932eb0287d, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 693.876758] env[62585]: DEBUG nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 693.877584] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Acquiring lock "refresh_cache-1cad8d1b-ed02-424c-879c-2f23d4d90b22" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.877584] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Acquired lock "refresh_cache-1cad8d1b-ed02-424c-879c-2f23d4d90b22" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.877584] env[62585]: DEBUG nova.network.neutron [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 694.121386] env[62585]: INFO nova.compute.manager [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] [instance: 3b50dbde-2969-4a4b-ae35-42416342a60b] Took 1.02 seconds to deallocate network for instance. [ 694.285831] env[62585]: INFO nova.compute.manager [-] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Took 1.02 seconds to deallocate network for instance. 
[ 694.288165] env[62585]: DEBUG nova.compute.claims [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 694.288282] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.308295] env[62585]: DEBUG oslo_concurrency.lockutils [req-7424d546-f5a6-4cb2-8971-dc40e29778d3 req-c7a32696-589a-4388-ac3c-9f13d2e5a2a1 service nova] Releasing lock "refresh_cache-72cdccb7-b398-4833-af82-d64222c83f8d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.308295] env[62585]: DEBUG nova.compute.manager [req-7424d546-f5a6-4cb2-8971-dc40e29778d3 req-c7a32696-589a-4388-ac3c-9f13d2e5a2a1 service nova] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Received event network-vif-deleted-e8a71e43-82b7-41db-871c-0cd81bf0c6a9 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 694.398333] env[62585]: DEBUG nova.network.neutron [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.466420] env[62585]: DEBUG nova.network.neutron [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.970030] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Releasing lock "refresh_cache-1cad8d1b-ed02-424c-879c-2f23d4d90b22" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.970154] env[62585]: DEBUG nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 694.970330] env[62585]: DEBUG nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 694.970497] env[62585]: DEBUG nova.network.neutron [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 694.988578] env[62585]: DEBUG nova.network.neutron [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 695.148206] env[62585]: INFO nova.scheduler.client.report [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Deleted allocations for instance 3b50dbde-2969-4a4b-ae35-42416342a60b [ 695.228805] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937caf9f-b474-4c37-8bc4-9012059d5506 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.236296] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c12855df-bba1-46a8-808a-942bfef6cbdc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.266237] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25fee3e2-6cd1-48ec-b208-031116ffedfc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.272969] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c9d506-d1b4-49ce-8669-309589fdf3d7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.285591] env[62585]: DEBUG nova.compute.provider_tree [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 695.494498] env[62585]: DEBUG nova.network.neutron [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.657537] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2b33a991-a296-4854-8828-ca824b2e8b05 tempest-MigrationsAdminTest-99143934 tempest-MigrationsAdminTest-99143934-project-member] Lock 
"3b50dbde-2969-4a4b-ae35-42416342a60b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 138.987s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.788735] env[62585]: DEBUG nova.scheduler.client.report [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 696.000808] env[62585]: INFO nova.compute.manager [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] [instance: 1cad8d1b-ed02-424c-879c-2f23d4d90b22] Took 1.03 seconds to deallocate network for instance. [ 696.163726] env[62585]: DEBUG nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 696.293639] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.293970] env[62585]: DEBUG nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 696.297778] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.847s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.299988] env[62585]: INFO nova.compute.claims [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 696.685549] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.799370] env[62585]: DEBUG nova.compute.utils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 696.801069] env[62585]: DEBUG nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 696.801252] env[62585]: DEBUG nova.network.neutron [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 696.859956] env[62585]: DEBUG nova.policy [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '79e20a95276d44ce8b8323e1cbe05904', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2e49d4e1baa42e987434709bff86f37', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 697.038182] env[62585]: INFO nova.scheduler.client.report [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Deleted allocations for instance 1cad8d1b-ed02-424c-879c-2f23d4d90b22 [ 697.184144] env[62585]: DEBUG nova.network.neutron [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Successfully created port: e7341978-0b9f-44b2-92e1-f0e3f82a5750 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 697.304428] env[62585]: DEBUG nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 697.546162] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c19745c9-c518-4564-978a-35b8194c929c tempest-InstanceActionsTestJSON-2015899920 tempest-InstanceActionsTestJSON-2015899920-project-member] Lock "1cad8d1b-ed02-424c-879c-2f23d4d90b22" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 138.277s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.620403] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae0c161-beb2-4c2b-b644-d2e872663aca {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.628233] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51daf863-a8d0-417b-bb17-998a0de609d0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.659475] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18e3328-0c0b-49e3-b53b-00f87531ab46 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.666244] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a3ecd6-d037-4751-b84f-c515418c8d71 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.679395] env[62585]: DEBUG nova.compute.provider_tree [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.026970] env[62585]: DEBUG nova.compute.manager [req-1202994b-9712-41bd-b03a-0dbc8ff47bfc req-af62ef21-3e09-4a09-b02e-845ad2ebd150 service nova] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Received event network-changed-e7341978-0b9f-44b2-92e1-f0e3f82a5750 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 698.026970] env[62585]: DEBUG nova.compute.manager [req-1202994b-9712-41bd-b03a-0dbc8ff47bfc req-af62ef21-3e09-4a09-b02e-845ad2ebd150 service nova] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Refreshing instance network info cache due to event network-changed-e7341978-0b9f-44b2-92e1-f0e3f82a5750. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 698.026970] env[62585]: DEBUG oslo_concurrency.lockutils [req-1202994b-9712-41bd-b03a-0dbc8ff47bfc req-af62ef21-3e09-4a09-b02e-845ad2ebd150 service nova] Acquiring lock "refresh_cache-53e10c33-0f41-48a2-ac19-c0b34a9a9312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 698.026970] env[62585]: DEBUG oslo_concurrency.lockutils [req-1202994b-9712-41bd-b03a-0dbc8ff47bfc req-af62ef21-3e09-4a09-b02e-845ad2ebd150 service nova] Acquired lock "refresh_cache-53e10c33-0f41-48a2-ac19-c0b34a9a9312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.026970] env[62585]: DEBUG nova.network.neutron [req-1202994b-9712-41bd-b03a-0dbc8ff47bfc req-af62ef21-3e09-4a09-b02e-845ad2ebd150 service nova] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Refreshing network info cache for port e7341978-0b9f-44b2-92e1-f0e3f82a5750 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 698.053806] env[62585]: DEBUG nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 698.188192] env[62585]: DEBUG nova.scheduler.client.report [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 698.318857] env[62585]: DEBUG nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 698.352660] env[62585]: DEBUG nova.virt.hardware [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 698.352970] env[62585]: DEBUG nova.virt.hardware [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 698.353140] env[62585]: DEBUG nova.virt.hardware [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 698.354417] env[62585]: DEBUG nova.virt.hardware [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 698.354652] env[62585]: DEBUG nova.virt.hardware [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 698.354922] env[62585]: DEBUG nova.virt.hardware [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 698.355102] env[62585]: DEBUG nova.virt.hardware [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 698.359018] env[62585]: DEBUG nova.virt.hardware [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 698.359018] 
env[62585]: DEBUG nova.virt.hardware [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 698.359018] env[62585]: DEBUG nova.virt.hardware [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 698.359018] env[62585]: DEBUG nova.virt.hardware [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 698.359018] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3f370f-bf3c-443e-bb35-c513c4ce454e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.370293] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801838f8-953b-40ba-91e2-f8aab2e6db92 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.389235] env[62585]: ERROR nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e7341978-0b9f-44b2-92e1-f0e3f82a5750, please check neutron logs for more information. 
[ 698.389235] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 698.389235] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 698.389235] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 698.389235] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 698.389235] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 698.389235] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 698.389235] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 698.389235] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 698.389235] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 698.389235] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 698.389235] env[62585]: ERROR nova.compute.manager raise self.value [ 698.389235] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 698.389235] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 698.389235] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 698.389235] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 698.389785] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 698.389785] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 698.389785] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e7341978-0b9f-44b2-92e1-f0e3f82a5750, please check neutron logs for more information. 
[ 698.389785] env[62585]: ERROR nova.compute.manager [ 698.389785] env[62585]: Traceback (most recent call last): [ 698.389785] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 698.389785] env[62585]: listener.cb(fileno) [ 698.389785] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 698.389785] env[62585]: result = function(*args, **kwargs) [ 698.389785] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 698.389785] env[62585]: return func(*args, **kwargs) [ 698.389785] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 698.389785] env[62585]: raise e [ 698.389785] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 698.389785] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 698.389785] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 698.389785] env[62585]: created_port_ids = self._update_ports_for_instance( [ 698.389785] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 698.389785] env[62585]: with excutils.save_and_reraise_exception(): [ 698.389785] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 698.389785] env[62585]: self.force_reraise() [ 698.389785] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 698.389785] env[62585]: raise self.value [ 698.389785] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 698.389785] env[62585]: updated_port = self._update_port( [ 698.389785] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 698.389785] env[62585]: _ensure_no_port_binding_failure(port) [ 698.389785] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 698.389785] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 698.390721] env[62585]: nova.exception.PortBindingFailed: Binding failed for port e7341978-0b9f-44b2-92e1-f0e3f82a5750, please check neutron logs for more information. [ 698.390721] env[62585]: Removing descriptor: 15 [ 698.390721] env[62585]: ERROR nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e7341978-0b9f-44b2-92e1-f0e3f82a5750, please check neutron logs for more information. 
[ 698.390721] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Traceback (most recent call last): [ 698.390721] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 698.390721] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] yield resources [ 698.390721] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 698.390721] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] self.driver.spawn(context, instance, image_meta, [ 698.390721] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 698.390721] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] self._vmops.spawn(context, instance, image_meta, injected_files, [ 698.390721] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 698.390721] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] vm_ref = self.build_virtual_machine(instance, [ 698.391262] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 698.391262] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] vif_infos = vmwarevif.get_vif_info(self._session, [ 698.391262] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 698.391262] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] for vif in network_info: [ 698.391262] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 698.391262] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] return self._sync_wrapper(fn, *args, **kwargs) [ 698.391262] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 698.391262] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] self.wait() [ 698.391262] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 698.391262] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] self[:] = self._gt.wait() [ 698.391262] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 698.391262] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] return self._exit_event.wait() [ 698.391262] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 698.391631] env[62585]: ERROR 
nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] result = hub.switch() [ 698.391631] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 698.391631] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] return self.greenlet.switch() [ 698.391631] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 698.391631] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] result = function(*args, **kwargs) [ 698.391631] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 698.391631] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] return func(*args, **kwargs) [ 698.391631] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 698.391631] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] raise e [ 698.391631] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 698.391631] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] nwinfo = self.network_api.allocate_for_instance( [ 698.391631] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 698.391631] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] created_port_ids = self._update_ports_for_instance( [ 698.392052] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 698.392052] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] with excutils.save_and_reraise_exception(): [ 698.392052] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 698.392052] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] self.force_reraise() [ 698.392052] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 698.392052] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] raise self.value [ 698.392052] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 698.392052] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] updated_port = self._update_port( [ 698.392052] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 698.392052] 
env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] _ensure_no_port_binding_failure(port) [ 698.392052] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 698.392052] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] raise exception.PortBindingFailed(port_id=port['id']) [ 698.392399] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] nova.exception.PortBindingFailed: Binding failed for port e7341978-0b9f-44b2-92e1-f0e3f82a5750, please check neutron logs for more information. [ 698.392399] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] [ 698.392399] env[62585]: INFO nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Terminating instance [ 698.393029] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Acquiring lock "refresh_cache-53e10c33-0f41-48a2-ac19-c0b34a9a9312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 698.545186] env[62585]: DEBUG nova.network.neutron [req-1202994b-9712-41bd-b03a-0dbc8ff47bfc req-af62ef21-3e09-4a09-b02e-845ad2ebd150 service nova] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.577926] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.631045] env[62585]: DEBUG nova.network.neutron [req-1202994b-9712-41bd-b03a-0dbc8ff47bfc req-af62ef21-3e09-4a09-b02e-845ad2ebd150 service nova] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.696638] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.399s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.697117] env[62585]: DEBUG nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 698.699653] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.097s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.133727] env[62585]: DEBUG oslo_concurrency.lockutils [req-1202994b-9712-41bd-b03a-0dbc8ff47bfc req-af62ef21-3e09-4a09-b02e-845ad2ebd150 service nova] Releasing lock "refresh_cache-53e10c33-0f41-48a2-ac19-c0b34a9a9312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.134178] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Acquired lock "refresh_cache-53e10c33-0f41-48a2-ac19-c0b34a9a9312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.134363] env[62585]: DEBUG nova.network.neutron [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 699.209098] env[62585]: DEBUG nova.compute.utils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 699.211092] env[62585]: DEBUG nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 699.211269] env[62585]: DEBUG nova.network.neutron [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 699.311903] env[62585]: DEBUG nova.policy [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f26abf4eaa71482b8fd3c6425a9c683d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48929b5f0c2c41ddade223ab57002fc4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 699.544960] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4138472-808c-47da-b207-ac6fc4e20a9e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.555585] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95f3b2e-e435-4ea9-b55f-a70a0496e63c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.601169] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfbb25a2-92d2-4fb1-8de4-7e5fa2b01933 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.606274] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd14e7f-a7eb-42ba-9f93-f21f2546ef8f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.621471] env[62585]: DEBUG nova.compute.provider_tree [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.653764] env[62585]: DEBUG nova.network.neutron [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.717411] env[62585]: DEBUG nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 699.757269] env[62585]: DEBUG nova.network.neutron [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.057170] env[62585]: DEBUG nova.compute.manager [req-c45548aa-ece3-4d1f-8822-732d7131a86c req-8c71624e-efd5-476d-85f9-347f3b488a8e service nova] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Received event network-vif-deleted-e7341978-0b9f-44b2-92e1-f0e3f82a5750 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 700.110234] env[62585]: DEBUG nova.network.neutron [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Successfully created port: 3997936f-598a-412a-995e-11e5eb424f7b {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 700.125018] env[62585]: DEBUG nova.scheduler.client.report [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 700.260186] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Releasing lock "refresh_cache-53e10c33-0f41-48a2-ac19-c0b34a9a9312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.260708] env[62585]: DEBUG nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 700.260955] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 700.261147] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-46faf0ee-8deb-4cd7-af74-3fdcde0ae3ea {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.271390] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329b5f26-9954-4a72-90e4-746852602a93 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.294834] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 53e10c33-0f41-48a2-ac19-c0b34a9a9312 could not be found. [ 700.295078] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 700.295274] env[62585]: INFO nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Took 0.03 seconds to destroy the instance on the hypervisor. [ 700.295973] env[62585]: DEBUG oslo.service.loopingcall [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 700.295973] env[62585]: DEBUG nova.compute.manager [-] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 700.295973] env[62585]: DEBUG nova.network.neutron [-] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 700.324577] env[62585]: DEBUG nova.network.neutron [-] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.633745] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.934s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.637028] env[62585]: ERROR nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ae42ce76-2f63-4dd4-9a61-925fb66a7a38, please check neutron logs for more information. [ 700.637028] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Traceback (most recent call last): [ 700.637028] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 700.637028] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] self.driver.spawn(context, instance, image_meta, [ 700.637028] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 700.637028] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 700.637028] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 700.637028] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] vm_ref = self.build_virtual_machine(instance, [ 700.637028] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 700.637028] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] vif_infos = vmwarevif.get_vif_info(self._session, [ 700.637028] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 700.637648] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] for vif in network_info: [ 700.637648] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 700.637648] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] return self._sync_wrapper(fn, *args, **kwargs) [ 700.637648] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 700.637648] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] self.wait() [ 700.637648] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 700.637648] env[62585]: ERROR 
nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] self[:] = self._gt.wait() [ 700.637648] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 700.637648] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] return self._exit_event.wait() [ 700.637648] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 700.637648] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] result = hub.switch() [ 700.637648] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 700.637648] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] return self.greenlet.switch() [ 700.638073] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 700.638073] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] result = function(*args, **kwargs) [ 700.638073] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 700.638073] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] return func(*args, **kwargs) [ 700.638073] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 700.638073] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] raise e [ 700.638073] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 700.638073] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] nwinfo = self.network_api.allocate_for_instance( [ 700.638073] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 700.638073] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] created_port_ids = self._update_ports_for_instance( [ 700.638073] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 700.638073] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] with excutils.save_and_reraise_exception(): [ 700.638073] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 700.638404] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] self.force_reraise() [ 700.638404] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 700.638404] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] raise self.value [ 700.638404] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 700.638404] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] updated_port = self._update_port( [ 700.638404] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 700.638404] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] _ensure_no_port_binding_failure(port) [ 700.638404] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 700.638404] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] raise exception.PortBindingFailed(port_id=port['id']) [ 700.638404] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] nova.exception.PortBindingFailed: Binding failed for port ae42ce76-2f63-4dd4-9a61-925fb66a7a38, please check neutron logs for more information. [ 700.638404] env[62585]: ERROR nova.compute.manager [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] [ 700.638698] env[62585]: DEBUG nova.compute.utils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Binding failed for port ae42ce76-2f63-4dd4-9a61-925fb66a7a38, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 700.638698] env[62585]: DEBUG nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Build of instance ed0ec962-3c4e-409f-9332-0a79ca1c6ed3 was re-scheduled: Binding failed for port ae42ce76-2f63-4dd4-9a61-925fb66a7a38, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 700.638698] env[62585]: DEBUG nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 700.638698] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Acquiring lock "refresh_cache-ed0ec962-3c4e-409f-9332-0a79ca1c6ed3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.638840] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Acquired lock "refresh_cache-ed0ec962-3c4e-409f-9332-0a79ca1c6ed3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.639941] env[62585]: DEBUG nova.network.neutron [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 700.641132] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.740s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.645033] env[62585]: INFO nova.compute.claims [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 700.727276] env[62585]: DEBUG nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 700.763740] env[62585]: DEBUG nova.virt.hardware [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 700.765171] env[62585]: DEBUG nova.virt.hardware [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 700.765436] env[62585]: DEBUG nova.virt.hardware [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 700.765673] env[62585]: DEBUG nova.virt.hardware [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 700.765860] env[62585]: DEBUG nova.virt.hardware [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 700.766058] env[62585]: DEBUG nova.virt.hardware [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 700.766419] env[62585]: DEBUG nova.virt.hardware [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 700.766510] env[62585]: DEBUG nova.virt.hardware [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 700.766698] 
env[62585]: DEBUG nova.virt.hardware [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 700.766895] env[62585]: DEBUG nova.virt.hardware [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 700.767112] env[62585]: DEBUG nova.virt.hardware [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 700.768311] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a20647-6da7-4380-b296-b424083b8975 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.776390] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31278791-d0bc-463d-983d-a3ed0a551430 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.830807] env[62585]: DEBUG nova.network.neutron [-] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.188166] env[62585]: DEBUG nova.network.neutron [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 701.334410] env[62585]: INFO nova.compute.manager [-] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Took 1.04 seconds to deallocate network for instance. 
[ 701.336292] env[62585]: DEBUG nova.compute.claims [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 701.336476] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.342797] env[62585]: DEBUG nova.network.neutron [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.412039] env[62585]: ERROR nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3997936f-598a-412a-995e-11e5eb424f7b, please check neutron logs for more information. [ 701.412039] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 701.412039] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 701.412039] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 701.412039] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 701.412039] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 701.412039] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 701.412039] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 701.412039] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 701.412039] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 701.412039] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 701.412039] env[62585]: ERROR nova.compute.manager raise self.value [ 701.412039] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 701.412039] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 701.412039] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 701.412039] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 701.412584] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 701.412584] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 701.412584] env[62585]: ERROR 
nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3997936f-598a-412a-995e-11e5eb424f7b, please check neutron logs for more information. [ 701.412584] env[62585]: ERROR nova.compute.manager [ 701.412584] env[62585]: Traceback (most recent call last): [ 701.412584] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 701.412584] env[62585]: listener.cb(fileno) [ 701.412584] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 701.412584] env[62585]: result = function(*args, **kwargs) [ 701.412584] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 701.412584] env[62585]: return func(*args, **kwargs) [ 701.412584] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 701.412584] env[62585]: raise e [ 701.412584] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 701.412584] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 701.412584] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 701.412584] env[62585]: created_port_ids = self._update_ports_for_instance( [ 701.412584] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 701.412584] env[62585]: with excutils.save_and_reraise_exception(): [ 701.412584] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 701.412584] env[62585]: self.force_reraise() [ 701.412584] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 701.412584] env[62585]: raise self.value [ 701.412584] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 701.412584] env[62585]: updated_port = self._update_port( [ 701.412584] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 701.412584] env[62585]: _ensure_no_port_binding_failure(port) [ 701.412584] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 701.412584] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 701.413378] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 3997936f-598a-412a-995e-11e5eb424f7b, please check neutron logs for more information. [ 701.413378] env[62585]: Removing descriptor: 15 [ 701.413378] env[62585]: ERROR nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3997936f-598a-412a-995e-11e5eb424f7b, please check neutron logs for more information. 
[ 701.413378] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Traceback (most recent call last): [ 701.413378] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 701.413378] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] yield resources [ 701.413378] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 701.413378] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] self.driver.spawn(context, instance, image_meta, [ 701.413378] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 701.413378] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] self._vmops.spawn(context, instance, image_meta, injected_files, [ 701.413378] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 701.413378] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] vm_ref = self.build_virtual_machine(instance, [ 701.413858] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 701.413858] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] vif_infos = vmwarevif.get_vif_info(self._session, [ 701.413858] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 701.413858] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] for vif in network_info: [ 701.413858] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 701.413858] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] return self._sync_wrapper(fn, *args, **kwargs) [ 701.413858] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 701.413858] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] self.wait() [ 701.413858] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 701.413858] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] self[:] = self._gt.wait() [ 701.413858] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 701.413858] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] return self._exit_event.wait() [ 701.413858] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 701.414278] env[62585]: ERROR 
nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] result = hub.switch() [ 701.414278] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 701.414278] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] return self.greenlet.switch() [ 701.414278] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 701.414278] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] result = function(*args, **kwargs) [ 701.414278] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 701.414278] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] return func(*args, **kwargs) [ 701.414278] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 701.414278] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] raise e [ 701.414278] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 701.414278] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] nwinfo = self.network_api.allocate_for_instance( [ 701.414278] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 701.414278] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] created_port_ids = self._update_ports_for_instance( [ 701.414754] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 701.414754] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] with excutils.save_and_reraise_exception(): [ 701.414754] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 701.414754] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] self.force_reraise() [ 701.414754] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 701.414754] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] raise self.value [ 701.414754] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 701.414754] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] updated_port = self._update_port( [ 701.414754] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 701.414754] 
env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] _ensure_no_port_binding_failure(port) [ 701.414754] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 701.414754] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] raise exception.PortBindingFailed(port_id=port['id']) [ 701.415124] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] nova.exception.PortBindingFailed: Binding failed for port 3997936f-598a-412a-995e-11e5eb424f7b, please check neutron logs for more information. [ 701.415124] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] [ 701.415124] env[62585]: INFO nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Terminating instance [ 701.415993] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "refresh_cache-424fc272-b4b9-4867-a083-b27abe308f81" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.416326] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "refresh_cache-424fc272-b4b9-4867-a083-b27abe308f81" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.416529] env[62585]: DEBUG nova.network.neutron [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 701.847984] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Releasing lock "refresh_cache-ed0ec962-3c4e-409f-9332-0a79ca1c6ed3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 701.848251] env[62585]: DEBUG nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 701.848433] env[62585]: DEBUG nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 701.848596] env[62585]: DEBUG nova.network.neutron [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 701.872275] env[62585]: DEBUG nova.network.neutron [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 701.932536] env[62585]: DEBUG nova.network.neutron [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 702.002120] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2fac0c-ce55-4ca9-944c-411faa87180a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.009487] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efdf8d27-f2e3-43b6-b34d-239988706f64 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.042585] env[62585]: DEBUG nova.network.neutron [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.044160] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0fbccc5-8a49-442d-9da2-abf609484bbd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.051740] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe9d5b7-6a26-4467-85eb-4e3e928f1292 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.067408] env[62585]: DEBUG nova.compute.provider_tree [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 702.111837] env[62585]: DEBUG nova.compute.manager [req-e92f49d4-4310-404e-9da3-93efbd07719e 
req-ef5f897f-81d2-4a30-b3c7-18a9c4bcf076 service nova] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Received event network-changed-3997936f-598a-412a-995e-11e5eb424f7b {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 702.111940] env[62585]: DEBUG nova.compute.manager [req-e92f49d4-4310-404e-9da3-93efbd07719e req-ef5f897f-81d2-4a30-b3c7-18a9c4bcf076 service nova] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Refreshing instance network info cache due to event network-changed-3997936f-598a-412a-995e-11e5eb424f7b. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 702.112595] env[62585]: DEBUG oslo_concurrency.lockutils [req-e92f49d4-4310-404e-9da3-93efbd07719e req-ef5f897f-81d2-4a30-b3c7-18a9c4bcf076 service nova] Acquiring lock "refresh_cache-424fc272-b4b9-4867-a083-b27abe308f81" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.376989] env[62585]: DEBUG nova.network.neutron [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.549023] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "refresh_cache-424fc272-b4b9-4867-a083-b27abe308f81" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 702.549023] env[62585]: DEBUG nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 702.549023] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 702.549329] env[62585]: DEBUG oslo_concurrency.lockutils [req-e92f49d4-4310-404e-9da3-93efbd07719e req-ef5f897f-81d2-4a30-b3c7-18a9c4bcf076 service nova] Acquired lock "refresh_cache-424fc272-b4b9-4867-a083-b27abe308f81" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.549395] env[62585]: DEBUG nova.network.neutron [req-e92f49d4-4310-404e-9da3-93efbd07719e req-ef5f897f-81d2-4a30-b3c7-18a9c4bcf076 service nova] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Refreshing network info cache for port 3997936f-598a-412a-995e-11e5eb424f7b {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 702.550458] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e870d9f-f20b-45c8-bdc8-171107894ed4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.561184] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b13e4c-1944-4662-b8d1-c68687305a49 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.575023] env[62585]: DEBUG nova.scheduler.client.report [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 702.593036] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 424fc272-b4b9-4867-a083-b27abe308f81 could not be found. [ 702.593036] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 702.593036] env[62585]: INFO nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 702.593036] env[62585]: DEBUG oslo.service.loopingcall [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 702.593036] env[62585]: DEBUG nova.compute.manager [-] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 702.593036] env[62585]: DEBUG nova.network.neutron [-] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 702.615107] env[62585]: DEBUG nova.network.neutron [-] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 702.878530] env[62585]: INFO nova.compute.manager [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] [instance: ed0ec962-3c4e-409f-9332-0a79ca1c6ed3] Took 1.03 seconds to deallocate network for instance. [ 703.069100] env[62585]: DEBUG nova.network.neutron [req-e92f49d4-4310-404e-9da3-93efbd07719e req-ef5f897f-81d2-4a30-b3c7-18a9c4bcf076 service nova] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 703.081658] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.441s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.082902] env[62585]: DEBUG nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 703.086535] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.082s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.120181] env[62585]: DEBUG nova.network.neutron [-] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.186241] env[62585]: DEBUG nova.network.neutron [req-e92f49d4-4310-404e-9da3-93efbd07719e req-ef5f897f-81d2-4a30-b3c7-18a9c4bcf076 service nova] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.595116] env[62585]: DEBUG nova.compute.utils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 703.601193] env[62585]: DEBUG nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 703.601363] env[62585]: DEBUG nova.network.neutron [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 703.622054] env[62585]: INFO nova.compute.manager [-] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Took 1.03 seconds to deallocate network for instance. 
[ 703.624238] env[62585]: DEBUG nova.compute.claims [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 703.624413] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.690612] env[62585]: DEBUG nova.policy [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f434794c6528422db6974b8c826b6dee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7020a4f68049440a91e6c04fcf5f8464', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 703.696598] env[62585]: DEBUG oslo_concurrency.lockutils [req-e92f49d4-4310-404e-9da3-93efbd07719e req-ef5f897f-81d2-4a30-b3c7-18a9c4bcf076 service nova] Releasing lock "refresh_cache-424fc272-b4b9-4867-a083-b27abe308f81" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.696988] env[62585]: DEBUG nova.compute.manager [req-e92f49d4-4310-404e-9da3-93efbd07719e req-ef5f897f-81d2-4a30-b3c7-18a9c4bcf076 service nova] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Received event network-vif-deleted-3997936f-598a-412a-995e-11e5eb424f7b {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 703.835825] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Acquiring lock "5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.836521] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Lock "5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.903795] env[62585]: INFO nova.scheduler.client.report [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Deleted allocations for instance ed0ec962-3c4e-409f-9332-0a79ca1c6ed3 [ 704.072073] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b4c0c7-9543-4619-a87a-47b3dcc1846c 
{{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.080383] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d209188-daba-45db-b1fb-a2387386154e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.116215] env[62585]: DEBUG nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 704.120266] env[62585]: DEBUG nova.network.neutron [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Successfully created port: 3cd023b5-a6ac-46ca-82da-ac2bb66b923d {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 704.122739] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b3f1b2-688a-4cdb-9a95-59a55862ab17 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.130888] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228d762f-10e2-4239-a45e-2d31149895af {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.145169] env[62585]: DEBUG nova.compute.provider_tree [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.411595] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c56aac0c-a268-4e3e-a053-d748a0c4de57 tempest-ServerPasswordTestJSON-1522375250 tempest-ServerPasswordTestJSON-1522375250-project-member] Lock "ed0ec962-3c4e-409f-9332-0a79ca1c6ed3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 140.473s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.648705] env[62585]: DEBUG nova.scheduler.client.report [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 704.914909] env[62585]: DEBUG nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 
7504c221-2d27-4dc6-9100-9a2dca2a6036] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 705.106253] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "8a9daa60-e93a-4276-bf23-652ae7b0618b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.106394] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "8a9daa60-e93a-4276-bf23-652ae7b0618b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.131031] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "4dfc00d9-64db-439e-baee-041562f7354b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.131031] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "4dfc00d9-64db-439e-baee-041562f7354b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.137140] env[62585]: DEBUG nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 705.155453] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.069s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.156076] env[62585]: ERROR nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3dfed7bc-259f-44c5-a77f-4ce1f09e11cb, please check neutron logs for more information. 
[ 705.156076] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Traceback (most recent call last): [ 705.156076] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 705.156076] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] self.driver.spawn(context, instance, image_meta, [ 705.156076] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 705.156076] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] self._vmops.spawn(context, instance, image_meta, injected_files, [ 705.156076] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 705.156076] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] vm_ref = self.build_virtual_machine(instance, [ 705.156076] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 705.156076] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] vif_infos = vmwarevif.get_vif_info(self._session, [ 705.156076] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 705.156589] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] for vif in network_info: [ 705.156589] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 705.156589] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] return self._sync_wrapper(fn, *args, **kwargs) [ 705.156589] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 705.156589] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] self.wait() [ 705.156589] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 705.156589] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] self[:] = self._gt.wait() [ 705.156589] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 705.156589] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] return self._exit_event.wait() [ 705.156589] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 705.156589] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] result = hub.switch() [ 705.156589] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
705.156589] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] return self.greenlet.switch() [ 705.157186] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 705.157186] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] result = function(*args, **kwargs) [ 705.157186] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 705.157186] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] return func(*args, **kwargs) [ 705.157186] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 705.157186] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] raise e [ 705.157186] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 705.157186] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] nwinfo = self.network_api.allocate_for_instance( [ 705.157186] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 705.157186] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] created_port_ids = self._update_ports_for_instance( [ 705.157186] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 705.157186] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] with excutils.save_and_reraise_exception(): [ 705.157186] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 705.157779] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] self.force_reraise() [ 705.157779] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 705.157779] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] raise self.value [ 705.157779] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 705.157779] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] updated_port = self._update_port( [ 705.157779] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 705.157779] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] _ensure_no_port_binding_failure(port) [ 705.157779] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 705.157779] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] raise exception.PortBindingFailed(port_id=port['id']) [ 705.157779] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] nova.exception.PortBindingFailed: Binding failed for port 3dfed7bc-259f-44c5-a77f-4ce1f09e11cb, please check neutron logs for more information. [ 705.157779] env[62585]: ERROR nova.compute.manager [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] [ 705.158282] env[62585]: DEBUG nova.compute.utils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Binding failed for port 3dfed7bc-259f-44c5-a77f-4ce1f09e11cb, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 705.158848] env[62585]: DEBUG nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Build of instance ae66c3e2-eac8-4239-b5be-64dc0dcf2c04 was re-scheduled: Binding failed for port 3dfed7bc-259f-44c5-a77f-4ce1f09e11cb, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 705.159277] env[62585]: DEBUG nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 705.159499] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Acquiring lock "refresh_cache-ae66c3e2-eac8-4239-b5be-64dc0dcf2c04" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.159641] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Acquired lock "refresh_cache-ae66c3e2-eac8-4239-b5be-64dc0dcf2c04" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.159826] env[62585]: DEBUG nova.network.neutron [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 705.163740] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.886s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.163740] env[62585]: DEBUG nova.objects.instance [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 
tempest-ServerShowV247Test-165934211-project-member] Lazy-loading 'resources' on Instance uuid 16f01d66-44f8-4912-989a-48c39f667c95 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 705.174593] env[62585]: DEBUG nova.virt.hardware [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 705.174817] env[62585]: DEBUG nova.virt.hardware [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 705.175038] env[62585]: DEBUG nova.virt.hardware [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 705.175289] env[62585]: DEBUG nova.virt.hardware [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 705.175448] env[62585]: DEBUG nova.virt.hardware [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 705.175595] env[62585]: DEBUG nova.virt.hardware [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 705.175805] env[62585]: DEBUG nova.virt.hardware [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 705.176175] env[62585]: DEBUG nova.virt.hardware [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 
tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 705.176405] env[62585]: DEBUG nova.virt.hardware [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 705.176581] env[62585]: DEBUG nova.virt.hardware [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 705.176871] env[62585]: DEBUG nova.virt.hardware [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 705.178432] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72fd37cb-46a0-4760-bbc6-83f7db0e9d1d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.188136] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba2e2fc-409e-4f57-8bc3-6f986414893c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.369117] env[62585]: DEBUG nova.compute.manager [req-8ca6db46-18bc-4709-8c9f-a61d55c142ee req-7519e4a4-237a-4d5a-9c06-7d6e4a218c07 service nova] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Received event network-changed-3cd023b5-a6ac-46ca-82da-ac2bb66b923d {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 705.369338] env[62585]: DEBUG nova.compute.manager [req-8ca6db46-18bc-4709-8c9f-a61d55c142ee req-7519e4a4-237a-4d5a-9c06-7d6e4a218c07 service nova] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Refreshing instance network info cache due to event network-changed-3cd023b5-a6ac-46ca-82da-ac2bb66b923d. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 705.369558] env[62585]: DEBUG oslo_concurrency.lockutils [req-8ca6db46-18bc-4709-8c9f-a61d55c142ee req-7519e4a4-237a-4d5a-9c06-7d6e4a218c07 service nova] Acquiring lock "refresh_cache-0049c4a4-dfc2-4968-8ab1-61c344f32e6d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.369679] env[62585]: DEBUG oslo_concurrency.lockutils [req-8ca6db46-18bc-4709-8c9f-a61d55c142ee req-7519e4a4-237a-4d5a-9c06-7d6e4a218c07 service nova] Acquired lock "refresh_cache-0049c4a4-dfc2-4968-8ab1-61c344f32e6d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.369833] env[62585]: DEBUG nova.network.neutron [req-8ca6db46-18bc-4709-8c9f-a61d55c142ee req-7519e4a4-237a-4d5a-9c06-7d6e4a218c07 service nova] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Refreshing network info cache for port 3cd023b5-a6ac-46ca-82da-ac2bb66b923d {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 705.438651] env[62585]: DEBUG oslo_concurrency.lockutils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.680754] env[62585]: DEBUG nova.network.neutron [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.815020] env[62585]: DEBUG nova.network.neutron [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.854344] env[62585]: ERROR nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3cd023b5-a6ac-46ca-82da-ac2bb66b923d, please check neutron logs for more information. 
[ 705.854344] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 705.854344] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 705.854344] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 705.854344] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 705.854344] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 705.854344] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 705.854344] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 705.854344] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 705.854344] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 705.854344] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 705.854344] env[62585]: ERROR nova.compute.manager raise self.value [ 705.854344] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 705.854344] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 705.854344] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 705.854344] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 705.856033] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 705.856033] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 705.856033] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3cd023b5-a6ac-46ca-82da-ac2bb66b923d, please check neutron logs for more information. 
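Both tracebacks above funnel through oslo.utils' save-and-reraise pattern in _update_ports_for_instance before PortBindingFailed propagates. The following is a minimal illustrative sketch of that pattern, not Nova source: it assumes oslo.utils is installed and uses a stand-in exception class in place of nova.exception.PortBindingFailed.

# Illustrative sketch (not Nova source) of the save-and-reraise pattern the
# tracebacks above pass through: the context manager records the active
# exception, lets cleanup code run, and re-raises the original on exit.
from oslo_utils import excutils


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)


def _update_port(port_id):
    # Stand-in for the Neutron port update; here it always fails so the
    # error path is exercised.
    raise PortBindingFailed(port_id)


def update_ports(port_ids):
    updated = []
    for port_id in port_ids:
        try:
            updated.append(_update_port(port_id))
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup (e.g. rolling back ports created so far) would run
                # here; the original PortBindingFailed is re-raised on exit.
                pass
    return updated


if __name__ == "__main__":
    try:
        update_ports(["3cd023b5-a6ac-46ca-82da-ac2bb66b923d"])
    except PortBindingFailed as exc:
        print("caught:", exc)

Setting reraise = False on the context manager object is how callers suppress the re-raise when cleanup fully handles the error; the log shows the default path, where force_reraise() re-raises the saved exception.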
[ 705.856033] env[62585]: ERROR nova.compute.manager [ 705.856033] env[62585]: Traceback (most recent call last): [ 705.856033] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 705.856033] env[62585]: listener.cb(fileno) [ 705.856033] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 705.856033] env[62585]: result = function(*args, **kwargs) [ 705.856033] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 705.856033] env[62585]: return func(*args, **kwargs) [ 705.856033] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 705.856033] env[62585]: raise e [ 705.856033] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 705.856033] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 705.856033] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 705.856033] env[62585]: created_port_ids = self._update_ports_for_instance( [ 705.856033] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 705.856033] env[62585]: with excutils.save_and_reraise_exception(): [ 705.856033] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 705.856033] env[62585]: self.force_reraise() [ 705.856033] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 705.856033] env[62585]: raise self.value [ 705.856033] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 705.856033] env[62585]: updated_port = self._update_port( [ 705.856033] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 705.856033] env[62585]: _ensure_no_port_binding_failure(port) [ 705.856033] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 705.856033] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 705.858098] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 3cd023b5-a6ac-46ca-82da-ac2bb66b923d, please check neutron logs for more information. [ 705.858098] env[62585]: Removing descriptor: 15 [ 705.858098] env[62585]: ERROR nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3cd023b5-a6ac-46ca-82da-ac2bb66b923d, please check neutron logs for more information. 
[ 705.858098] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Traceback (most recent call last): [ 705.858098] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 705.858098] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] yield resources [ 705.858098] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 705.858098] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] self.driver.spawn(context, instance, image_meta, [ 705.858098] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 705.858098] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 705.858098] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 705.858098] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] vm_ref = self.build_virtual_machine(instance, [ 705.858451] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 705.858451] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] vif_infos = vmwarevif.get_vif_info(self._session, [ 705.858451] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 705.858451] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] for vif in network_info: [ 705.858451] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 705.858451] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] return self._sync_wrapper(fn, *args, **kwargs) [ 705.858451] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 705.858451] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] self.wait() [ 705.858451] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 705.858451] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] self[:] = self._gt.wait() [ 705.858451] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 705.858451] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] return self._exit_event.wait() [ 705.858451] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 705.858816] env[62585]: ERROR 
nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] result = hub.switch() [ 705.858816] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 705.858816] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] return self.greenlet.switch() [ 705.858816] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 705.858816] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] result = function(*args, **kwargs) [ 705.858816] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 705.858816] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] return func(*args, **kwargs) [ 705.858816] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 705.858816] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] raise e [ 705.858816] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 705.858816] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] nwinfo = self.network_api.allocate_for_instance( [ 705.858816] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 705.858816] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] created_port_ids = self._update_ports_for_instance( [ 705.859207] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 705.859207] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] with excutils.save_and_reraise_exception(): [ 705.859207] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 705.859207] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] self.force_reraise() [ 705.859207] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 705.859207] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] raise self.value [ 705.859207] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 705.859207] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] updated_port = self._update_port( [ 705.859207] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 705.859207] 
env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] _ensure_no_port_binding_failure(port) [ 705.859207] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 705.859207] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] raise exception.PortBindingFailed(port_id=port['id']) [ 705.859511] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] nova.exception.PortBindingFailed: Binding failed for port 3cd023b5-a6ac-46ca-82da-ac2bb66b923d, please check neutron logs for more information. [ 705.859511] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] [ 705.859511] env[62585]: INFO nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Terminating instance [ 705.859511] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Acquiring lock "refresh_cache-0049c4a4-dfc2-4968-8ab1-61c344f32e6d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.895110] env[62585]: DEBUG nova.network.neutron [req-8ca6db46-18bc-4709-8c9f-a61d55c142ee req-7519e4a4-237a-4d5a-9c06-7d6e4a218c07 service nova] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.997028] env[62585]: DEBUG nova.network.neutron [req-8ca6db46-18bc-4709-8c9f-a61d55c142ee req-7519e4a4-237a-4d5a-9c06-7d6e4a218c07 service nova] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.065527] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8298f7d-2803-47e8-931a-3dbfe68572a6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.074100] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a640121-0960-431d-88e7-75bde4f44e81 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.107373] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14f31fe-f003-4522-b2ff-a7fc97f9ad29 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.115765] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac65f39a-8416-463d-8800-6a07aaf46c73 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.130243] env[62585]: DEBUG nova.compute.provider_tree [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b 
{{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.320028] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Releasing lock "refresh_cache-ae66c3e2-eac8-4239-b5be-64dc0dcf2c04" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.320028] env[62585]: DEBUG nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 706.320028] env[62585]: DEBUG nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 706.320028] env[62585]: DEBUG nova.network.neutron [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 706.338592] env[62585]: DEBUG nova.network.neutron [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.499963] env[62585]: DEBUG oslo_concurrency.lockutils [req-8ca6db46-18bc-4709-8c9f-a61d55c142ee req-7519e4a4-237a-4d5a-9c06-7d6e4a218c07 service nova] Releasing lock "refresh_cache-0049c4a4-dfc2-4968-8ab1-61c344f32e6d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.500402] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Acquired lock "refresh_cache-0049c4a4-dfc2-4968-8ab1-61c344f32e6d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.500587] env[62585]: DEBUG nova.network.neutron [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 706.633774] env[62585]: DEBUG nova.scheduler.client.report [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 706.841651] env[62585]: DEBUG nova.network.neutron [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.021789] env[62585]: DEBUG nova.network.neutron [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.132378] env[62585]: DEBUG nova.network.neutron [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.138180] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.975s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.140516] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.117s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.161383] env[62585]: INFO nova.scheduler.client.report [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Deleted allocations for instance 16f01d66-44f8-4912-989a-48c39f667c95 [ 707.345302] env[62585]: INFO nova.compute.manager [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] [instance: ae66c3e2-eac8-4239-b5be-64dc0dcf2c04] Took 1.03 seconds to deallocate network for instance. [ 707.393873] env[62585]: DEBUG nova.compute.manager [req-1a433dfb-0abc-4cc9-ad3d-6a1d2da1b0bd req-56ddcf0d-b4d7-4a8d-b20c-f7aa39c13ea3 service nova] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Received event network-vif-deleted-3cd023b5-a6ac-46ca-82da-ac2bb66b923d {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 707.635259] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Releasing lock "refresh_cache-0049c4a4-dfc2-4968-8ab1-61c344f32e6d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.635843] env[62585]: DEBUG nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 707.636102] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 707.636444] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-30e6f521-0d15-4c01-8d50-28ada4b984d3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.646069] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e733b19-8675-47ea-be01-f1119bd274d8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.674059] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0049c4a4-dfc2-4968-8ab1-61c344f32e6d could not be found. [ 707.674059] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 707.674332] env[62585]: INFO nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 707.676274] env[62585]: DEBUG oslo.service.loopingcall [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 707.676274] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6ed03ad1-4bc6-4ffb-963f-9a92d52cc28c tempest-ServerShowV247Test-165934211 tempest-ServerShowV247Test-165934211-project-member] Lock "16f01d66-44f8-4912-989a-48c39f667c95" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.092s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.676274] env[62585]: DEBUG nova.compute.manager [-] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 707.676274] env[62585]: DEBUG nova.network.neutron [-] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 707.697616] env[62585]: DEBUG nova.network.neutron [-] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.964731] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238265c1-b6b8-4802-8501-8145c4873b43 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.974733] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac70921-97fc-4d9a-902b-a236b8432e81 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.000745] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c81ba58-9787-4ad8-a365-f9eac996027f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.008278] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bc8cc4-8f58-4149-a998-5f4fe1503766 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.022647] env[62585]: DEBUG nova.compute.provider_tree [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.202017] env[62585]: DEBUG nova.network.neutron [-] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.382783] env[62585]: INFO nova.scheduler.client.report [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Deleted allocations for instance ae66c3e2-eac8-4239-b5be-64dc0dcf2c04 [ 708.526674] env[62585]: DEBUG nova.scheduler.client.report [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 708.706797] env[62585]: INFO nova.compute.manager [-] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Took 1.03 seconds to deallocate network for instance. 
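The "Acquiring lock ... / Lock ... acquired ... waited / Lock ... released ... held" lines throughout this log come from oslo.concurrency's synchronized wrapper (the inner function in lockutils.py cited in each entry). A minimal sketch of that usage follows, assuming oslo.concurrency is installed and DEBUG logging is enabled; the lock name and function are placeholders, not taken from Nova.

# Illustrative sketch (not Nova source): serializing work on a named lock the
# way Nova serializes per-instance build/claim work.  The decorator's wrapper
# logs the acquire/waited/held timings seen in this log at DEBUG level.
import time

from oslo_concurrency import lockutils


@lockutils.synchronized("demo-instance-uuid")
def _locked_do_build(instance_uuid):
    # Only one thread at a time runs this body for a given lock name.
    time.sleep(0.1)
    return instance_uuid


if __name__ == "__main__":
    print(_locked_do_build("demo-instance-uuid"))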
[ 708.708458] env[62585]: DEBUG nova.compute.claims [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 708.708963] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.890967] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0c09c7c8-4af3-4385-96b6-1491dc71d00a tempest-ServerActionsTestJSON-813877983 tempest-ServerActionsTestJSON-813877983-project-member] Lock "ae66c3e2-eac8-4239-b5be-64dc0dcf2c04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 144.572s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.032878] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.892s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.033684] env[62585]: ERROR nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cc86cd95-3cb7-47e1-a436-ae6433f69748, please check neutron logs for more information. 
[ 709.033684] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Traceback (most recent call last): [ 709.033684] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 709.033684] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] self.driver.spawn(context, instance, image_meta, [ 709.033684] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 709.033684] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 709.033684] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 709.033684] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] vm_ref = self.build_virtual_machine(instance, [ 709.033684] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 709.033684] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] vif_infos = vmwarevif.get_vif_info(self._session, [ 709.033684] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 709.034976] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] for vif in network_info: [ 709.034976] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 709.034976] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] return self._sync_wrapper(fn, *args, **kwargs) [ 709.034976] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 709.034976] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] self.wait() [ 709.034976] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 709.034976] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] self[:] = self._gt.wait() [ 709.034976] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 709.034976] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] return self._exit_event.wait() [ 709.034976] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 709.034976] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] current.throw(*self._exc) [ 709.034976] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
709.034976] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] result = function(*args, **kwargs) [ 709.035371] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 709.035371] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] return func(*args, **kwargs) [ 709.035371] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 709.035371] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] raise e [ 709.035371] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 709.035371] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] nwinfo = self.network_api.allocate_for_instance( [ 709.035371] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 709.035371] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] created_port_ids = self._update_ports_for_instance( [ 709.035371] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 709.035371] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] with excutils.save_and_reraise_exception(): [ 709.035371] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.035371] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] self.force_reraise() [ 709.035371] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.036182] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] raise self.value [ 709.036182] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 709.036182] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] updated_port = self._update_port( [ 709.036182] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.036182] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] _ensure_no_port_binding_failure(port) [ 709.036182] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.036182] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] raise exception.PortBindingFailed(port_id=port['id']) [ 709.036182] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] nova.exception.PortBindingFailed: Binding failed for 
port cc86cd95-3cb7-47e1-a436-ae6433f69748, please check neutron logs for more information. [ 709.036182] env[62585]: ERROR nova.compute.manager [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] [ 709.036182] env[62585]: DEBUG nova.compute.utils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Binding failed for port cc86cd95-3cb7-47e1-a436-ae6433f69748, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 709.036638] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.138s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.040227] env[62585]: DEBUG nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Build of instance 0b4d919f-552e-489e-bcfb-f6447cf81fb8 was re-scheduled: Binding failed for port cc86cd95-3cb7-47e1-a436-ae6433f69748, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 709.042028] env[62585]: DEBUG nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 709.042028] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Acquiring lock "refresh_cache-0b4d919f-552e-489e-bcfb-f6447cf81fb8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.042028] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Acquired lock "refresh_cache-0b4d919f-552e-489e-bcfb-f6447cf81fb8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.042028] env[62585]: DEBUG nova.network.neutron [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 709.393521] env[62585]: DEBUG nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 709.569626] env[62585]: DEBUG nova.network.neutron [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.654352] env[62585]: DEBUG nova.network.neutron [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.894102] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1fb5099-256d-4e7b-ab4e-5ee98d2340b7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.907956] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5bf7b94-a03c-4411-bba6-650ff73948be {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.939672] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.940529] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b3d4c9-7625-4fc0-a753-28152d34b7bf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.947836] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d0f37f-bffe-4247-9937-f1d315614529 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.961354] env[62585]: DEBUG nova.compute.provider_tree [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.157331] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Releasing lock "refresh_cache-0b4d919f-552e-489e-bcfb-f6447cf81fb8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.157579] env[62585]: DEBUG nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 710.157798] env[62585]: DEBUG nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 710.157993] env[62585]: DEBUG nova.network.neutron [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 710.173870] env[62585]: DEBUG nova.network.neutron [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.464550] env[62585]: DEBUG nova.scheduler.client.report [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 710.678494] env[62585]: DEBUG nova.network.neutron [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.970178] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.934s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.970800] env[62585]: ERROR nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9361b60c-f746-4c08-b38a-bf00ba2faa45, please check neutron logs for more information. 
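The inventory reported above for provider 66db9ec1-b5c3-45d2-a885-8e338110656b implies the capacity the scheduler has to work with. Assuming the usual placement capacity formula (total - reserved) * allocation_ratio (the formula itself is not printed in this log), a quick sketch of what those numbers amount to:

    # Capacity implied by the inventory in the report entry above, assuming the
    # standard placement formula (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g}")  # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400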
[ 710.970800] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Traceback (most recent call last): [ 710.970800] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 710.970800] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] self.driver.spawn(context, instance, image_meta, [ 710.970800] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 710.970800] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] self._vmops.spawn(context, instance, image_meta, injected_files, [ 710.970800] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 710.970800] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] vm_ref = self.build_virtual_machine(instance, [ 710.970800] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 710.970800] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] vif_infos = vmwarevif.get_vif_info(self._session, [ 710.970800] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 710.971413] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] for vif in network_info: [ 710.971413] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 710.971413] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] return self._sync_wrapper(fn, *args, **kwargs) [ 710.971413] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 710.971413] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] self.wait() [ 710.971413] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 710.971413] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] self[:] = self._gt.wait() [ 710.971413] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 710.971413] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] return self._exit_event.wait() [ 710.971413] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 710.971413] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] current.throw(*self._exc) [ 710.971413] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
710.971413] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] result = function(*args, **kwargs) [ 710.971757] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 710.971757] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] return func(*args, **kwargs) [ 710.971757] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 710.971757] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] raise e [ 710.971757] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 710.971757] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] nwinfo = self.network_api.allocate_for_instance( [ 710.971757] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 710.971757] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] created_port_ids = self._update_ports_for_instance( [ 710.971757] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 710.971757] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] with excutils.save_and_reraise_exception(): [ 710.971757] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 710.971757] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] self.force_reraise() [ 710.971757] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 710.972238] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] raise self.value [ 710.972238] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 710.972238] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] updated_port = self._update_port( [ 710.972238] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 710.972238] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] _ensure_no_port_binding_failure(port) [ 710.972238] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 710.972238] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] raise exception.PortBindingFailed(port_id=port['id']) [ 710.972238] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] nova.exception.PortBindingFailed: Binding failed for 
port 9361b60c-f746-4c08-b38a-bf00ba2faa45, please check neutron logs for more information. [ 710.972238] env[62585]: ERROR nova.compute.manager [instance: 4557a853-232e-49e5-9052-ebf54d68e998] [ 710.972238] env[62585]: DEBUG nova.compute.utils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Binding failed for port 9361b60c-f746-4c08-b38a-bf00ba2faa45, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 710.972738] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.684s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.976140] env[62585]: DEBUG nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Build of instance 4557a853-232e-49e5-9052-ebf54d68e998 was re-scheduled: Binding failed for port 9361b60c-f746-4c08-b38a-bf00ba2faa45, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 710.977093] env[62585]: DEBUG nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 710.977093] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Acquiring lock "refresh_cache-4557a853-232e-49e5-9052-ebf54d68e998" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.977093] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Acquired lock "refresh_cache-4557a853-232e-49e5-9052-ebf54d68e998" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.977093] env[62585]: DEBUG nova.network.neutron [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 711.180940] env[62585]: INFO nova.compute.manager [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] [instance: 0b4d919f-552e-489e-bcfb-f6447cf81fb8] Took 1.02 seconds to deallocate network for instance. 
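Each of these build failures ends in the same guard at nova/network/neutron.py line 294, which raises PortBindingFailed(port_id=port['id']). A minimal sketch of that guard follows; the check on binding:vif_type == 'binding_failed' is an assumption for illustration, since the tracebacks above only confirm the function name, its location, and the exception it raises.

    # Sketch of the guard named in the tracebacks (nova/network/neutron.py:294).
    # The binding:vif_type == 'binding_failed' condition is assumed; the log
    # only confirms that the guard raises PortBindingFailed(port_id=...).
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # A port that Neutron could not bind trips the guard:
    try:
        _ensure_no_port_binding_failure(
            {'id': '9361b60c-f746-4c08-b38a-bf00ba2faa45',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)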
[ 711.498177] env[62585]: DEBUG nova.network.neutron [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.561986] env[62585]: DEBUG nova.network.neutron [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.768142] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19beabf6-dd37-4798-b656-18542a2cc259 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.775486] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a3b453-c55e-4759-b88b-e4d05cf81ffd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.804825] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399d39e8-b4e4-4722-9917-59c44bf2e941 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.812689] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f5913b-93a7-45d8-b99c-6ab51eb2b3a5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.825115] env[62585]: DEBUG nova.compute.provider_tree [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.064636] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Releasing lock "refresh_cache-4557a853-232e-49e5-9052-ebf54d68e998" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.064897] env[62585]: DEBUG nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 712.065079] env[62585]: DEBUG nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 712.065257] env[62585]: DEBUG nova.network.neutron [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 712.092309] env[62585]: DEBUG nova.network.neutron [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.215773] env[62585]: INFO nova.scheduler.client.report [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Deleted allocations for instance 0b4d919f-552e-489e-bcfb-f6447cf81fb8 [ 712.329140] env[62585]: DEBUG nova.scheduler.client.report [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 712.594768] env[62585]: DEBUG nova.network.neutron [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.726091] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d85f3afe-13a5-43b8-944c-89e1566472bb tempest-ServersNegativeTestMultiTenantJSON-1494129741 tempest-ServersNegativeTestMultiTenantJSON-1494129741-project-member] Lock "0b4d919f-552e-489e-bcfb-f6447cf81fb8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 139.724s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.836025] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.861s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.836025] env[62585]: ERROR nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e8a71e43-82b7-41db-871c-0cd81bf0c6a9, please check neutron logs for more information. [ 712.836025] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Traceback (most recent call last): [ 712.836025] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 712.836025] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] self.driver.spawn(context, instance, image_meta, [ 712.836025] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 712.836025] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 712.836025] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 712.836025] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] vm_ref = self.build_virtual_machine(instance, [ 712.836374] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 712.836374] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] vif_infos = vmwarevif.get_vif_info(self._session, [ 712.836374] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 712.836374] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] for vif in network_info: [ 712.836374] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 712.836374] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] return self._sync_wrapper(fn, *args, **kwargs) [ 712.836374] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 712.836374] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] self.wait() [ 712.836374] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 712.836374] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] self[:] = self._gt.wait() [ 712.836374] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 712.836374] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] return self._exit_event.wait() [ 
712.836374] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 712.836763] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] current.throw(*self._exc) [ 712.836763] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 712.836763] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] result = function(*args, **kwargs) [ 712.836763] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 712.836763] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] return func(*args, **kwargs) [ 712.836763] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 712.836763] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] raise e [ 712.836763] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 712.836763] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] nwinfo = self.network_api.allocate_for_instance( [ 712.836763] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 712.836763] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] created_port_ids = self._update_ports_for_instance( [ 712.836763] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 712.836763] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] with excutils.save_and_reraise_exception(): [ 712.837247] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 712.837247] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] self.force_reraise() [ 712.837247] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 712.837247] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] raise self.value [ 712.837247] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 712.837247] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] updated_port = self._update_port( [ 712.837247] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 712.837247] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] 
_ensure_no_port_binding_failure(port) [ 712.837247] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 712.837247] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] raise exception.PortBindingFailed(port_id=port['id']) [ 712.837247] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] nova.exception.PortBindingFailed: Binding failed for port e8a71e43-82b7-41db-871c-0cd81bf0c6a9, please check neutron logs for more information. [ 712.837247] env[62585]: ERROR nova.compute.manager [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] [ 712.837615] env[62585]: DEBUG nova.compute.utils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Binding failed for port e8a71e43-82b7-41db-871c-0cd81bf0c6a9, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 712.842015] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.153s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.842015] env[62585]: INFO nova.compute.claims [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 712.844175] env[62585]: DEBUG nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Build of instance 72cdccb7-b398-4833-af82-d64222c83f8d was re-scheduled: Binding failed for port e8a71e43-82b7-41db-871c-0cd81bf0c6a9, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 712.845426] env[62585]: DEBUG nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 712.845786] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Acquiring lock "refresh_cache-72cdccb7-b398-4833-af82-d64222c83f8d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.846381] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Acquired lock "refresh_cache-72cdccb7-b398-4833-af82-d64222c83f8d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.846667] env[62585]: DEBUG nova.network.neutron [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 713.097058] env[62585]: INFO nova.compute.manager [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 4557a853-232e-49e5-9052-ebf54d68e998] Took 1.03 seconds to deallocate network for instance. [ 713.234431] env[62585]: DEBUG nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 713.388311] env[62585]: DEBUG nova.network.neutron [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 713.461647] env[62585]: DEBUG nova.network.neutron [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.765190] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.969492] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Releasing lock "refresh_cache-72cdccb7-b398-4833-af82-d64222c83f8d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.970583] env[62585]: DEBUG nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 713.970583] env[62585]: DEBUG nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 713.970583] env[62585]: DEBUG nova.network.neutron [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 714.141678] env[62585]: INFO nova.scheduler.client.report [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Deleted allocations for instance 4557a853-232e-49e5-9052-ebf54d68e998 [ 714.152137] env[62585]: DEBUG nova.network.neutron [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 714.181323] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94455cc7-ba4b-4ee1-bfc5-898e913314b1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.194117] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525fe4a4-1eca-489b-a4c8-e6ae0794cfb8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.234100] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6229efb7-28ea-4c20-8893-7b05824134d5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.240291] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a03efed-fbd7-499c-83a3-b71e98b68565 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.255267] env[62585]: DEBUG nova.compute.provider_tree [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.654187] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9c4f05ae-40d7-4483-887f-40835e25c65a tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Lock "4557a853-232e-49e5-9052-ebf54d68e998" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.717s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.659576] env[62585]: DEBUG nova.network.neutron [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.758228] env[62585]: DEBUG nova.scheduler.client.report [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 715.157216] env[62585]: DEBUG nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 715.161448] env[62585]: INFO nova.compute.manager [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] [instance: 72cdccb7-b398-4833-af82-d64222c83f8d] Took 1.19 seconds to deallocate network for instance. [ 715.264027] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.425s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.265518] env[62585]: DEBUG nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 715.269409] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.691s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.273037] env[62585]: INFO nova.compute.claims [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 715.685552] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.779543] env[62585]: DEBUG nova.compute.utils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 715.783599] env[62585]: DEBUG nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 715.783699] env[62585]: DEBUG nova.network.neutron [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 715.852501] env[62585]: DEBUG nova.policy [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0cbaeae4fa1e4dc996a4d8a364ea0dae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44e32d293ad64cd499926859857e023e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 716.130426] env[62585]: DEBUG nova.network.neutron [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Successfully created port: 1c02db82-9ce5-479c-82d4-3ae4efa61754 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 716.191615] env[62585]: INFO nova.scheduler.client.report [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Deleted allocations for instance 72cdccb7-b398-4833-af82-d64222c83f8d [ 716.284514] env[62585]: DEBUG nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 716.581977] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259642ed-913c-4858-aaea-e65c94711ecf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.589627] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7001ee-df87-44de-8a95-66f6ecc8ad3b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.624918] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74759569-f5bd-4ce5-aea0-8dfa9691fb60 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.632169] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10844b1-b983-4359-af05-3256f54ff78b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.645903] env[62585]: DEBUG nova.compute.provider_tree [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.707709] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dc75be20-7ba4-4daa-8775-a1d71a3631ec tempest-ListImageFiltersTestJSON-1604609938 tempest-ListImageFiltersTestJSON-1604609938-project-member] Lock "72cdccb7-b398-4833-af82-d64222c83f8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 135.375s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.080061] env[62585]: DEBUG nova.compute.manager [req-6832745c-cf25-416a-9473-ebc97e3000d0 req-df953fc6-7d0d-42b1-8afc-bce4e9ba0728 service nova] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Received event network-changed-1c02db82-9ce5-479c-82d4-3ae4efa61754 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 717.080292] env[62585]: DEBUG nova.compute.manager [req-6832745c-cf25-416a-9473-ebc97e3000d0 req-df953fc6-7d0d-42b1-8afc-bce4e9ba0728 service nova] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Refreshing instance network info cache due to event network-changed-1c02db82-9ce5-479c-82d4-3ae4efa61754. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 717.080529] env[62585]: DEBUG oslo_concurrency.lockutils [req-6832745c-cf25-416a-9473-ebc97e3000d0 req-df953fc6-7d0d-42b1-8afc-bce4e9ba0728 service nova] Acquiring lock "refresh_cache-20cb5e74-a42c-4c79-aeea-7b8e658bf1d3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.080693] env[62585]: DEBUG oslo_concurrency.lockutils [req-6832745c-cf25-416a-9473-ebc97e3000d0 req-df953fc6-7d0d-42b1-8afc-bce4e9ba0728 service nova] Acquired lock "refresh_cache-20cb5e74-a42c-4c79-aeea-7b8e658bf1d3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.080822] env[62585]: DEBUG nova.network.neutron [req-6832745c-cf25-416a-9473-ebc97e3000d0 req-df953fc6-7d0d-42b1-8afc-bce4e9ba0728 service nova] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Refreshing network info cache for port 1c02db82-9ce5-479c-82d4-3ae4efa61754 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 717.148530] env[62585]: DEBUG nova.scheduler.client.report [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 717.199865] env[62585]: ERROR nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1c02db82-9ce5-479c-82d4-3ae4efa61754, please check neutron logs for more information. 
[ 717.199865] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 717.199865] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 717.199865] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 717.199865] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 717.199865] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 717.199865] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 717.199865] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 717.199865] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 717.199865] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 717.199865] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 717.199865] env[62585]: ERROR nova.compute.manager raise self.value [ 717.199865] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 717.199865] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 717.199865] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 717.199865] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 717.200847] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 717.200847] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 717.200847] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1c02db82-9ce5-479c-82d4-3ae4efa61754, please check neutron logs for more information. 
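The duplicated traceback that follows comes from the eventlet hub: _allocate_network_async runs in a background greenthread, so the PortBindingFailed raised there is stored and re-raised later, once the spawning path iterates network_info and reaches _sync_wrapper()/wait(). A standalone illustration of that deferral pattern (plain eventlet, not Nova code):

    # Standalone illustration of the deferred-failure pattern visible in the
    # tracebacks: the exception is raised inside a greenthread and only
    # surfaces when the caller waits on the result.
    import eventlet

    def allocate_network():
        raise RuntimeError("binding failed")  # stand-in for PortBindingFailed

    gt = eventlet.spawn(allocate_network)
    # ... the build continues; the error only appears once the result is consumed:
    try:
        gt.wait()
    except RuntimeError as exc:
        print("deferred failure:", exc)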
[ 717.200847] env[62585]: ERROR nova.compute.manager [ 717.200847] env[62585]: Traceback (most recent call last): [ 717.200847] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 717.200847] env[62585]: listener.cb(fileno) [ 717.200847] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 717.200847] env[62585]: result = function(*args, **kwargs) [ 717.200847] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 717.200847] env[62585]: return func(*args, **kwargs) [ 717.200847] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 717.200847] env[62585]: raise e [ 717.200847] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 717.200847] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 717.200847] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 717.200847] env[62585]: created_port_ids = self._update_ports_for_instance( [ 717.200847] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 717.200847] env[62585]: with excutils.save_and_reraise_exception(): [ 717.200847] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 717.200847] env[62585]: self.force_reraise() [ 717.200847] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 717.200847] env[62585]: raise self.value [ 717.200847] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 717.200847] env[62585]: updated_port = self._update_port( [ 717.200847] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 717.200847] env[62585]: _ensure_no_port_binding_failure(port) [ 717.200847] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 717.200847] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 717.201994] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 1c02db82-9ce5-479c-82d4-3ae4efa61754, please check neutron logs for more information. [ 717.201994] env[62585]: Removing descriptor: 15 [ 717.211233] env[62585]: DEBUG nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 717.300030] env[62585]: DEBUG nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 717.321640] env[62585]: DEBUG nova.virt.hardware [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 717.321887] env[62585]: DEBUG nova.virt.hardware [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 717.322053] env[62585]: DEBUG nova.virt.hardware [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 717.322243] env[62585]: DEBUG nova.virt.hardware [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 717.322382] env[62585]: DEBUG nova.virt.hardware [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 717.322526] env[62585]: DEBUG nova.virt.hardware [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 717.322729] env[62585]: DEBUG nova.virt.hardware [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 717.322886] env[62585]: DEBUG nova.virt.hardware [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 717.323136] 
env[62585]: DEBUG nova.virt.hardware [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 717.323319] env[62585]: DEBUG nova.virt.hardware [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 717.323488] env[62585]: DEBUG nova.virt.hardware [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 717.324335] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1eb6cc-2dc3-4c43-a269-62d4921f8e73 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.332373] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc6d040-b8fb-4e2d-9bc6-6f4654a19e38 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.346314] env[62585]: ERROR nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1c02db82-9ce5-479c-82d4-3ae4efa61754, please check neutron logs for more information. 
[ 717.346314] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Traceback (most recent call last): [ 717.346314] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 717.346314] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] yield resources [ 717.346314] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 717.346314] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] self.driver.spawn(context, instance, image_meta, [ 717.346314] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 717.346314] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 717.346314] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 717.346314] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] vm_ref = self.build_virtual_machine(instance, [ 717.346314] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 717.346715] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] vif_infos = vmwarevif.get_vif_info(self._session, [ 717.346715] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 717.346715] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] for vif in network_info: [ 717.346715] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 717.346715] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] return self._sync_wrapper(fn, *args, **kwargs) [ 717.346715] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 717.346715] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] self.wait() [ 717.346715] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 717.346715] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] self[:] = self._gt.wait() [ 717.346715] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 717.346715] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] return self._exit_event.wait() [ 717.346715] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 717.346715] env[62585]: ERROR 
nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] current.throw(*self._exc) [ 717.347317] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 717.347317] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] result = function(*args, **kwargs) [ 717.347317] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 717.347317] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] return func(*args, **kwargs) [ 717.347317] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 717.347317] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] raise e [ 717.347317] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 717.347317] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] nwinfo = self.network_api.allocate_for_instance( [ 717.347317] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 717.347317] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] created_port_ids = self._update_ports_for_instance( [ 717.347317] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 717.347317] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] with excutils.save_and_reraise_exception(): [ 717.347317] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 717.347733] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] self.force_reraise() [ 717.347733] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 717.347733] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] raise self.value [ 717.347733] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 717.347733] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] updated_port = self._update_port( [ 717.347733] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 717.347733] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] _ensure_no_port_binding_failure(port) [ 717.347733] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
717.347733] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] raise exception.PortBindingFailed(port_id=port['id']) [ 717.347733] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] nova.exception.PortBindingFailed: Binding failed for port 1c02db82-9ce5-479c-82d4-3ae4efa61754, please check neutron logs for more information. [ 717.347733] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] [ 717.347733] env[62585]: INFO nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Terminating instance [ 717.348658] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "refresh_cache-20cb5e74-a42c-4c79-aeea-7b8e658bf1d3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.599072] env[62585]: DEBUG nova.network.neutron [req-6832745c-cf25-416a-9473-ebc97e3000d0 req-df953fc6-7d0d-42b1-8afc-bce4e9ba0728 service nova] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 717.656148] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.387s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.656684] env[62585]: DEBUG nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 717.659263] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.323s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.743370] env[62585]: DEBUG nova.network.neutron [req-6832745c-cf25-416a-9473-ebc97e3000d0 req-df953fc6-7d0d-42b1-8afc-bce4e9ba0728 service nova] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.746719] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.102445] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 718.102686] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 718.164124] env[62585]: DEBUG nova.compute.utils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 718.168701] env[62585]: DEBUG nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 718.168879] env[62585]: DEBUG nova.network.neutron [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 718.210036] env[62585]: DEBUG nova.policy [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0be8fd9262b94f5480dc708bab3d8c95', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4245f4698be846a9879e54e794f22000', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 718.245660] env[62585]: DEBUG oslo_concurrency.lockutils [req-6832745c-cf25-416a-9473-ebc97e3000d0 req-df953fc6-7d0d-42b1-8afc-bce4e9ba0728 service nova] Releasing lock "refresh_cache-20cb5e74-a42c-4c79-aeea-7b8e658bf1d3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.246113] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquired lock "refresh_cache-20cb5e74-a42c-4c79-aeea-7b8e658bf1d3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.246231] env[62585]: DEBUG nova.network.neutron [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 718.438216] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2fb85b-e82a-44c6-83d3-1a55cb95b58b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.449049] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188c5bce-02c2-4d14-a1e3-8aa5122a9952 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.487254] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8eb6d8-37c3-4f06-9dd7-141fb3f4a0e7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.494501] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef8327c-c7a3-4adf-80ef-916a76f7bb68 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.500431] env[62585]: DEBUG nova.network.neutron [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 
tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Successfully created port: b03b948a-5106-4776-aa7a-bcb2b5bae2d4 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 718.510103] env[62585]: DEBUG nova.compute.provider_tree [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.607978] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 718.608947] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Starting heal instance info cache {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 718.669286] env[62585]: DEBUG nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 718.768732] env[62585]: DEBUG nova.network.neutron [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 718.929325] env[62585]: DEBUG nova.network.neutron [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.014657] env[62585]: DEBUG nova.scheduler.client.report [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 719.110749] env[62585]: DEBUG nova.compute.manager [req-12a7d99d-9ca6-4482-8d36-195818cdf478 req-c79edaaf-c620-4d48-80d6-d80e429e34aa service nova] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Received event network-vif-deleted-1c02db82-9ce5-479c-82d4-3ae4efa61754 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 719.111242] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Didn't find any instances for network info cache update. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 719.112566] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.112566] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.112566] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.112566] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.112566] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.112566] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62585) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.112812] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62585) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 719.112812] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.432357] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Releasing lock "refresh_cache-20cb5e74-a42c-4c79-aeea-7b8e658bf1d3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.432846] env[62585]: DEBUG nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 719.433068] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 719.433418] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aac44c89-d8b7-4734-a97a-66af1df7bd79 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.443854] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865d24b4-f37f-42aa-8a2e-a53dd725605c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.466252] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3 could not be found. [ 719.466339] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 719.466512] env[62585]: INFO nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Took 0.03 seconds to destroy the instance on the hypervisor. 
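[editor's note] Many of the surrounding DEBUG lines are oslo.concurrency lock bookkeeping: the per-instance "refresh_cache-<uuid>" locks are taken with the lockutils.lock() context manager (the lockutils.py:310/313/331 call sites), while the "compute_resources" lines with waited/held timings come from the lockutils.synchronized decorator wrapping resource-tracker methods. A minimal sketch of both forms follows, assuming oslo.concurrency is installed as in this environment; the function names, sleeps, and lock names reused from the log are illustrative only.

    import time

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def claim_example(instance_uuid):
        # Work done under the "compute_resources" lock; with debug logging
        # enabled the decorator emits the "acquired by ... waited N s" and
        # "released ... held N s" lines seen above.
        time.sleep(0.1)
        return instance_uuid


    def refresh_cache_example(instance_uuid):
        # Context-manager form used for the per-instance
        # "refresh_cache-<uuid>" locks in the log.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            time.sleep(0.05)


    if __name__ == '__main__':
        uuid = '20cb5e74-a42c-4c79-aeea-7b8e658bf1d3'
        claim_example(uuid)
        refresh_cache_example(uuid)

[end editor's note]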
[ 719.466758] env[62585]: DEBUG oslo.service.loopingcall [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 719.467064] env[62585]: DEBUG nova.compute.manager [-] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 719.467166] env[62585]: DEBUG nova.network.neutron [-] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 719.488302] env[62585]: ERROR nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b03b948a-5106-4776-aa7a-bcb2b5bae2d4, please check neutron logs for more information. [ 719.488302] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 719.488302] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 719.488302] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 719.488302] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 719.488302] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 719.488302] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 719.488302] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 719.488302] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.488302] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 719.488302] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.488302] env[62585]: ERROR nova.compute.manager raise self.value [ 719.488302] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 719.488302] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 719.488302] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.488302] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 719.488806] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 719.488806] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 719.488806] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b03b948a-5106-4776-aa7a-bcb2b5bae2d4, please check neutron logs for more information. 
[ 719.488806] env[62585]: ERROR nova.compute.manager [ 719.488806] env[62585]: Traceback (most recent call last): [ 719.488806] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 719.488806] env[62585]: listener.cb(fileno) [ 719.488806] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 719.488806] env[62585]: result = function(*args, **kwargs) [ 719.488806] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 719.488806] env[62585]: return func(*args, **kwargs) [ 719.488806] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 719.488806] env[62585]: raise e [ 719.488806] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 719.488806] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 719.488806] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 719.488806] env[62585]: created_port_ids = self._update_ports_for_instance( [ 719.488806] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 719.488806] env[62585]: with excutils.save_and_reraise_exception(): [ 719.488806] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.488806] env[62585]: self.force_reraise() [ 719.488806] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.488806] env[62585]: raise self.value [ 719.488806] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 719.488806] env[62585]: updated_port = self._update_port( [ 719.488806] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.488806] env[62585]: _ensure_no_port_binding_failure(port) [ 719.488806] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 719.488806] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 719.489692] env[62585]: nova.exception.PortBindingFailed: Binding failed for port b03b948a-5106-4776-aa7a-bcb2b5bae2d4, please check neutron logs for more information. [ 719.489692] env[62585]: Removing descriptor: 15 [ 719.519810] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.860s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.520681] env[62585]: ERROR nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e7341978-0b9f-44b2-92e1-f0e3f82a5750, please check neutron logs for more information. 
[ 719.520681] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Traceback (most recent call last): [ 719.520681] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 719.520681] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] self.driver.spawn(context, instance, image_meta, [ 719.520681] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 719.520681] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] self._vmops.spawn(context, instance, image_meta, injected_files, [ 719.520681] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 719.520681] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] vm_ref = self.build_virtual_machine(instance, [ 719.520681] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 719.520681] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] vif_infos = vmwarevif.get_vif_info(self._session, [ 719.520681] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 719.521058] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] for vif in network_info: [ 719.521058] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 719.521058] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] return self._sync_wrapper(fn, *args, **kwargs) [ 719.521058] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 719.521058] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] self.wait() [ 719.521058] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 719.521058] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] self[:] = self._gt.wait() [ 719.521058] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 719.521058] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] return self._exit_event.wait() [ 719.521058] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 719.521058] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] result = hub.switch() [ 719.521058] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
719.521058] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] return self.greenlet.switch() [ 719.521408] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 719.521408] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] result = function(*args, **kwargs) [ 719.521408] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 719.521408] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] return func(*args, **kwargs) [ 719.521408] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 719.521408] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] raise e [ 719.521408] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 719.521408] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] nwinfo = self.network_api.allocate_for_instance( [ 719.521408] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 719.521408] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] created_port_ids = self._update_ports_for_instance( [ 719.521408] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 719.521408] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] with excutils.save_and_reraise_exception(): [ 719.521408] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.521759] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] self.force_reraise() [ 719.521759] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.521759] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] raise self.value [ 719.521759] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 719.521759] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] updated_port = self._update_port( [ 719.521759] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.521759] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] _ensure_no_port_binding_failure(port) [ 719.521759] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 719.521759] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] raise exception.PortBindingFailed(port_id=port['id']) [ 719.521759] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] nova.exception.PortBindingFailed: Binding failed for port e7341978-0b9f-44b2-92e1-f0e3f82a5750, please check neutron logs for more information. [ 719.521759] env[62585]: ERROR nova.compute.manager [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] [ 719.522067] env[62585]: DEBUG nova.compute.utils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Binding failed for port e7341978-0b9f-44b2-92e1-f0e3f82a5750, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 719.523754] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.899s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.527374] env[62585]: DEBUG nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Build of instance 53e10c33-0f41-48a2-ac19-c0b34a9a9312 was re-scheduled: Binding failed for port e7341978-0b9f-44b2-92e1-f0e3f82a5750, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 719.528320] env[62585]: DEBUG nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 719.528783] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Acquiring lock "refresh_cache-53e10c33-0f41-48a2-ac19-c0b34a9a9312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.528980] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Acquired lock "refresh_cache-53e10c33-0f41-48a2-ac19-c0b34a9a9312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.529175] env[62585]: DEBUG nova.network.neutron [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 719.542867] env[62585]: DEBUG nova.network.neutron [-] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 719.615835] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.678420] env[62585]: DEBUG nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 719.708322] env[62585]: DEBUG nova.virt.hardware [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 719.708571] env[62585]: DEBUG nova.virt.hardware [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 719.708726] env[62585]: DEBUG nova.virt.hardware [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 719.708905] env[62585]: DEBUG nova.virt.hardware [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 719.709063] env[62585]: DEBUG nova.virt.hardware [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 719.709211] env[62585]: DEBUG nova.virt.hardware [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 719.709409] env[62585]: DEBUG nova.virt.hardware [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 719.709567] env[62585]: DEBUG nova.virt.hardware [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 719.709729] env[62585]: DEBUG nova.virt.hardware [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 719.709885] env[62585]: DEBUG nova.virt.hardware [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 719.710067] env[62585]: DEBUG nova.virt.hardware [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 719.711241] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1baadbef-a556-4319-9eba-008fe12312de {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.719857] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254c39c7-8bd2-40db-a4f4-3ca5132a93b4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.735208] env[62585]: ERROR nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b03b948a-5106-4776-aa7a-bcb2b5bae2d4, please check neutron logs for more information. 
[ 719.735208] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Traceback (most recent call last): [ 719.735208] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 719.735208] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] yield resources [ 719.735208] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 719.735208] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] self.driver.spawn(context, instance, image_meta, [ 719.735208] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 719.735208] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 719.735208] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 719.735208] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] vm_ref = self.build_virtual_machine(instance, [ 719.735208] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 719.735579] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] vif_infos = vmwarevif.get_vif_info(self._session, [ 719.735579] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 719.735579] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] for vif in network_info: [ 719.735579] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 719.735579] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] return self._sync_wrapper(fn, *args, **kwargs) [ 719.735579] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 719.735579] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] self.wait() [ 719.735579] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 719.735579] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] self[:] = self._gt.wait() [ 719.735579] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 719.735579] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] return self._exit_event.wait() [ 719.735579] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 719.735579] env[62585]: ERROR 
nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] current.throw(*self._exc) [ 719.736015] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 719.736015] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] result = function(*args, **kwargs) [ 719.736015] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 719.736015] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] return func(*args, **kwargs) [ 719.736015] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 719.736015] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] raise e [ 719.736015] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 719.736015] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] nwinfo = self.network_api.allocate_for_instance( [ 719.736015] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 719.736015] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] created_port_ids = self._update_ports_for_instance( [ 719.736015] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 719.736015] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] with excutils.save_and_reraise_exception(): [ 719.736015] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.736420] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] self.force_reraise() [ 719.736420] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.736420] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] raise self.value [ 719.736420] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 719.736420] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] updated_port = self._update_port( [ 719.736420] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.736420] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] _ensure_no_port_binding_failure(port) [ 719.736420] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
719.736420] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] raise exception.PortBindingFailed(port_id=port['id']) [ 719.736420] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] nova.exception.PortBindingFailed: Binding failed for port b03b948a-5106-4776-aa7a-bcb2b5bae2d4, please check neutron logs for more information. [ 719.736420] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] [ 719.736420] env[62585]: INFO nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Terminating instance [ 719.736865] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Acquiring lock "refresh_cache-2fccf900-e294-4d66-93c5-d1c7570c5d7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.736865] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Acquired lock "refresh_cache-2fccf900-e294-4d66-93c5-d1c7570c5d7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.736865] env[62585]: DEBUG nova.network.neutron [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 720.044116] env[62585]: DEBUG nova.network.neutron [-] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.057907] env[62585]: DEBUG nova.network.neutron [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 720.213255] env[62585]: DEBUG nova.network.neutron [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.263076] env[62585]: DEBUG nova.network.neutron [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 720.410355] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2d0643-f43b-4db3-b853-d0e137c13d43 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.419423] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6adadfce-c5c1-40d1-bcf5-f71f2429db24 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.454986] env[62585]: DEBUG nova.network.neutron [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.456915] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5405ab8b-c7a6-48a6-b3dc-b98cd4498013 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.465461] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127b64ef-bac9-4e1f-920b-c3ea52bee09a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.480526] env[62585]: DEBUG nova.compute.provider_tree [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.546278] env[62585]: INFO nova.compute.manager [-] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Took 1.08 seconds to deallocate network for instance. 
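The PortBindingFailed traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py, line 294 in this build), which raises exception.PortBindingFailed(port_id=port['id']). A minimal, self-contained sketch of that check follows; it is not the Nova source, and the 'binding_failed' value of the Neutron binding:vif_type attribute is an assumption based on the standard port API; only port['id'] and the raised message text are confirmed by the log.

# Sketch only: approximates the check seen at the bottom of the traceback above.
# Assumes Neutron flags a failed binding with binding:vif_type == 'binding_failed'.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Treat a failed binding reported by Neutron as a hard error, as the log shows.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

if __name__ == '__main__':
    port = {'id': 'b03b948a-5106-4776-aa7a-bcb2b5bae2d4',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # reproduces the message logged above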
[ 720.550069] env[62585]: DEBUG nova.compute.claims [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 720.550285] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.715377] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Releasing lock "refresh_cache-53e10c33-0f41-48a2-ac19-c0b34a9a9312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.715636] env[62585]: DEBUG nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 720.715888] env[62585]: DEBUG nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 720.716086] env[62585]: DEBUG nova.network.neutron [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 720.741040] env[62585]: DEBUG nova.network.neutron [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 720.961245] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Releasing lock "refresh_cache-2fccf900-e294-4d66-93c5-d1c7570c5d7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.961878] env[62585]: DEBUG nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 720.964141] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 720.964141] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-381b1aba-d270-4dd5-836b-1161a808f0ab {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.975236] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05c428e-96ed-4432-9de1-7877cc2268dd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.997532] env[62585]: DEBUG nova.scheduler.client.report [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 721.018838] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2fccf900-e294-4d66-93c5-d1c7570c5d7e could not be found. [ 721.018838] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 721.019024] env[62585]: INFO nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Took 0.06 seconds to destroy the instance on the hypervisor. [ 721.019282] env[62585]: DEBUG oslo.service.loopingcall [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 721.019774] env[62585]: DEBUG nova.compute.manager [-] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 721.019880] env[62585]: DEBUG nova.network.neutron [-] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 721.051854] env[62585]: DEBUG nova.network.neutron [-] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.191495] env[62585]: DEBUG nova.compute.manager [req-16c7819d-2826-4283-b4c0-2de2e08fc4c8 req-51b320c5-4bbf-4fa9-b064-bbb1335d3636 service nova] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Received event network-changed-b03b948a-5106-4776-aa7a-bcb2b5bae2d4 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 721.192796] env[62585]: DEBUG nova.compute.manager [req-16c7819d-2826-4283-b4c0-2de2e08fc4c8 req-51b320c5-4bbf-4fa9-b064-bbb1335d3636 service nova] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Refreshing instance network info cache due to event network-changed-b03b948a-5106-4776-aa7a-bcb2b5bae2d4. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 721.193220] env[62585]: DEBUG oslo_concurrency.lockutils [req-16c7819d-2826-4283-b4c0-2de2e08fc4c8 req-51b320c5-4bbf-4fa9-b064-bbb1335d3636 service nova] Acquiring lock "refresh_cache-2fccf900-e294-4d66-93c5-d1c7570c5d7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.193592] env[62585]: DEBUG oslo_concurrency.lockutils [req-16c7819d-2826-4283-b4c0-2de2e08fc4c8 req-51b320c5-4bbf-4fa9-b064-bbb1335d3636 service nova] Acquired lock "refresh_cache-2fccf900-e294-4d66-93c5-d1c7570c5d7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.193686] env[62585]: DEBUG nova.network.neutron [req-16c7819d-2826-4283-b4c0-2de2e08fc4c8 req-51b320c5-4bbf-4fa9-b064-bbb1335d3636 service nova] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Refreshing network info cache for port b03b948a-5106-4776-aa7a-bcb2b5bae2d4 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 721.242457] env[62585]: DEBUG nova.network.neutron [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.504223] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.980s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.504969] env[62585]: ERROR nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 
tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3997936f-598a-412a-995e-11e5eb424f7b, please check neutron logs for more information. [ 721.504969] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Traceback (most recent call last): [ 721.504969] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 721.504969] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] self.driver.spawn(context, instance, image_meta, [ 721.504969] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 721.504969] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] self._vmops.spawn(context, instance, image_meta, injected_files, [ 721.504969] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 721.504969] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] vm_ref = self.build_virtual_machine(instance, [ 721.504969] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 721.504969] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] vif_infos = vmwarevif.get_vif_info(self._session, [ 721.504969] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 721.505500] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] for vif in network_info: [ 721.505500] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 721.505500] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] return self._sync_wrapper(fn, *args, **kwargs) [ 721.505500] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 721.505500] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] self.wait() [ 721.505500] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 721.505500] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] self[:] = self._gt.wait() [ 721.505500] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 721.505500] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] return self._exit_event.wait() [ 721.505500] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 721.505500] env[62585]: ERROR 
nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] result = hub.switch() [ 721.505500] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 721.505500] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] return self.greenlet.switch() [ 721.506079] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 721.506079] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] result = function(*args, **kwargs) [ 721.506079] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 721.506079] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] return func(*args, **kwargs) [ 721.506079] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 721.506079] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] raise e [ 721.506079] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 721.506079] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] nwinfo = self.network_api.allocate_for_instance( [ 721.506079] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 721.506079] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] created_port_ids = self._update_ports_for_instance( [ 721.506079] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 721.506079] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] with excutils.save_and_reraise_exception(): [ 721.506079] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.506431] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] self.force_reraise() [ 721.506431] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.506431] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] raise self.value [ 721.506431] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 721.506431] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] updated_port = self._update_port( [ 721.506431] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.506431] 
env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] _ensure_no_port_binding_failure(port) [ 721.506431] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 721.506431] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] raise exception.PortBindingFailed(port_id=port['id']) [ 721.506431] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] nova.exception.PortBindingFailed: Binding failed for port 3997936f-598a-412a-995e-11e5eb424f7b, please check neutron logs for more information. [ 721.506431] env[62585]: ERROR nova.compute.manager [instance: 424fc272-b4b9-4867-a083-b27abe308f81] [ 721.506737] env[62585]: DEBUG nova.compute.utils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Binding failed for port 3997936f-598a-412a-995e-11e5eb424f7b, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 721.507894] env[62585]: DEBUG nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Build of instance 424fc272-b4b9-4867-a083-b27abe308f81 was re-scheduled: Binding failed for port 3997936f-598a-412a-995e-11e5eb424f7b, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 721.509156] env[62585]: DEBUG nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 721.509156] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "refresh_cache-424fc272-b4b9-4867-a083-b27abe308f81" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.509156] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "refresh_cache-424fc272-b4b9-4867-a083-b27abe308f81" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.509156] env[62585]: DEBUG nova.network.neutron [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 721.510790] env[62585]: DEBUG oslo_concurrency.lockutils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.072s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.512611] env[62585]: INFO nova.compute.claims [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 721.555331] env[62585]: DEBUG nova.network.neutron [-] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.718925] env[62585]: DEBUG nova.network.neutron [req-16c7819d-2826-4283-b4c0-2de2e08fc4c8 req-51b320c5-4bbf-4fa9-b064-bbb1335d3636 service nova] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.746152] env[62585]: INFO nova.compute.manager [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 53e10c33-0f41-48a2-ac19-c0b34a9a9312] Took 1.03 seconds to deallocate network for instance. [ 721.819892] env[62585]: DEBUG nova.network.neutron [req-16c7819d-2826-4283-b4c0-2de2e08fc4c8 req-51b320c5-4bbf-4fa9-b064-bbb1335d3636 service nova] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.034668] env[62585]: DEBUG nova.network.neutron [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.058560] env[62585]: INFO nova.compute.manager [-] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Took 1.04 seconds to deallocate network for instance. 
[ 722.061300] env[62585]: DEBUG nova.compute.claims [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 722.061300] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.123791] env[62585]: DEBUG nova.network.neutron [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.323073] env[62585]: DEBUG oslo_concurrency.lockutils [req-16c7819d-2826-4283-b4c0-2de2e08fc4c8 req-51b320c5-4bbf-4fa9-b064-bbb1335d3636 service nova] Releasing lock "refresh_cache-2fccf900-e294-4d66-93c5-d1c7570c5d7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.323293] env[62585]: DEBUG nova.compute.manager [req-16c7819d-2826-4283-b4c0-2de2e08fc4c8 req-51b320c5-4bbf-4fa9-b064-bbb1335d3636 service nova] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Received event network-vif-deleted-b03b948a-5106-4776-aa7a-bcb2b5bae2d4 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 722.482384] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquiring lock "e4bd743b-b3a6-4872-9e33-a0183b976292" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.482610] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "e4bd743b-b3a6-4872-9e33-a0183b976292" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.626407] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "refresh_cache-424fc272-b4b9-4867-a083-b27abe308f81" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.626671] env[62585]: DEBUG nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 722.626808] env[62585]: DEBUG nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 722.627116] env[62585]: DEBUG nova.network.neutron [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 722.641925] env[62585]: DEBUG nova.network.neutron [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.772226] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5dc6114-7058-4644-b922-777f5f788b9b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.780022] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67c1b55-86cd-42f5-8e2f-9f25c006280a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.783585] env[62585]: INFO nova.scheduler.client.report [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Deleted allocations for instance 53e10c33-0f41-48a2-ac19-c0b34a9a9312 [ 722.820480] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7c7226-5bc0-47ad-abeb-db4c88ce1e29 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.828775] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0e60f4-1794-4116-a66d-573d7af39eb5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.842277] env[62585]: DEBUG nova.compute.provider_tree [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.147474] env[62585]: DEBUG nova.network.neutron [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.291296] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95a168f0-8fa1-4ca7-9ba4-26e751620ec8 tempest-VolumesAdminNegativeTest-1038077645 
tempest-VolumesAdminNegativeTest-1038077645-project-member] Lock "53e10c33-0f41-48a2-ac19-c0b34a9a9312" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 140.247s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.345802] env[62585]: DEBUG nova.scheduler.client.report [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 723.650666] env[62585]: INFO nova.compute.manager [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: 424fc272-b4b9-4867-a083-b27abe308f81] Took 1.02 seconds to deallocate network for instance. [ 723.794084] env[62585]: DEBUG nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 723.852136] env[62585]: DEBUG oslo_concurrency.lockutils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.341s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.852651] env[62585]: DEBUG nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 723.855189] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.146s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.322840] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Acquiring lock "54e0a14b-cc4f-4445-8d86-f25cc410d7d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.323120] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Lock "54e0a14b-cc4f-4445-8d86-f25cc410d7d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.327577] env[62585]: DEBUG oslo_concurrency.lockutils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.360648] env[62585]: DEBUG nova.compute.utils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 724.364956] env[62585]: DEBUG nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 724.365251] env[62585]: DEBUG nova.network.neutron [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 724.403618] env[62585]: DEBUG nova.policy [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9a2336e0b124f03ad700405bcad8f32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19b8936eaf754cbcbd1b099846a3146d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 724.629674] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8b3e67-f8a7-4369-bdb9-fc4d34cc0645 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.637539] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f35825-6cc6-448b-b5ee-86af3ae3024c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.669193] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0881701c-91d4-40ed-b7cc-7661c22cab95 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.676338] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a45b3b5-549f-4ee3-94a8-9bd90ebe5690 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.690895] env[62585]: DEBUG nova.compute.provider_tree [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.692803] env[62585]: INFO nova.scheduler.client.report [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Deleted allocations for instance 424fc272-b4b9-4867-a083-b27abe308f81 [ 724.698470] env[62585]: DEBUG nova.network.neutron [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Successfully created port: 5e91260d-350d-4dba-b3e1-d8e7abfad0a5 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 724.865989] env[62585]: DEBUG nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] 
[instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 725.202015] env[62585]: DEBUG nova.scheduler.client.report [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 725.207524] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe438aab-829c-4657-bb8a-b1cc5c08d02d tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "424fc272-b4b9-4867-a083-b27abe308f81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.641s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.647673] env[62585]: DEBUG nova.compute.manager [req-ac81cab7-4c48-40d1-9166-2b263bf3d028 req-3d054704-b364-4db3-ae53-749cd553f238 service nova] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Received event network-changed-5e91260d-350d-4dba-b3e1-d8e7abfad0a5 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 725.647775] env[62585]: DEBUG nova.compute.manager [req-ac81cab7-4c48-40d1-9166-2b263bf3d028 req-3d054704-b364-4db3-ae53-749cd553f238 service nova] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Refreshing instance network info cache due to event network-changed-5e91260d-350d-4dba-b3e1-d8e7abfad0a5. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 725.648213] env[62585]: DEBUG oslo_concurrency.lockutils [req-ac81cab7-4c48-40d1-9166-2b263bf3d028 req-3d054704-b364-4db3-ae53-749cd553f238 service nova] Acquiring lock "refresh_cache-7504c221-2d27-4dc6-9100-9a2dca2a6036" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.648392] env[62585]: DEBUG oslo_concurrency.lockutils [req-ac81cab7-4c48-40d1-9166-2b263bf3d028 req-3d054704-b364-4db3-ae53-749cd553f238 service nova] Acquired lock "refresh_cache-7504c221-2d27-4dc6-9100-9a2dca2a6036" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.648583] env[62585]: DEBUG nova.network.neutron [req-ac81cab7-4c48-40d1-9166-2b263bf3d028 req-3d054704-b364-4db3-ae53-749cd553f238 service nova] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Refreshing network info cache for port 5e91260d-350d-4dba-b3e1-d8e7abfad0a5 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 725.708559] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.853s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.712019] env[62585]: ERROR nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3cd023b5-a6ac-46ca-82da-ac2bb66b923d, please check neutron logs for more information. 
[ 725.712019] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Traceback (most recent call last): [ 725.712019] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 725.712019] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] self.driver.spawn(context, instance, image_meta, [ 725.712019] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 725.712019] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 725.712019] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 725.712019] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] vm_ref = self.build_virtual_machine(instance, [ 725.712019] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 725.712019] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] vif_infos = vmwarevif.get_vif_info(self._session, [ 725.712019] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 725.712573] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] for vif in network_info: [ 725.712573] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 725.712573] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] return self._sync_wrapper(fn, *args, **kwargs) [ 725.712573] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 725.712573] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] self.wait() [ 725.712573] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 725.712573] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] self[:] = self._gt.wait() [ 725.712573] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 725.712573] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] return self._exit_event.wait() [ 725.712573] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 725.712573] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] result = hub.switch() [ 725.712573] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
725.712573] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] return self.greenlet.switch() [ 725.713075] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 725.713075] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] result = function(*args, **kwargs) [ 725.713075] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 725.713075] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] return func(*args, **kwargs) [ 725.713075] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 725.713075] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] raise e [ 725.713075] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 725.713075] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] nwinfo = self.network_api.allocate_for_instance( [ 725.713075] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.713075] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] created_port_ids = self._update_ports_for_instance( [ 725.713075] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.713075] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] with excutils.save_and_reraise_exception(): [ 725.713075] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.713480] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] self.force_reraise() [ 725.713480] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.713480] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] raise self.value [ 725.713480] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.713480] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] updated_port = self._update_port( [ 725.713480] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.713480] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] _ensure_no_port_binding_failure(port) [ 725.713480] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 725.713480] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] raise exception.PortBindingFailed(port_id=port['id']) [ 725.713480] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] nova.exception.PortBindingFailed: Binding failed for port 3cd023b5-a6ac-46ca-82da-ac2bb66b923d, please check neutron logs for more information. [ 725.713480] env[62585]: ERROR nova.compute.manager [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] [ 725.713839] env[62585]: DEBUG nova.compute.utils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Binding failed for port 3cd023b5-a6ac-46ca-82da-ac2bb66b923d, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 725.713839] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.772s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.718067] env[62585]: INFO nova.compute.claims [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 725.718067] env[62585]: DEBUG nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 725.722027] env[62585]: DEBUG nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Build of instance 0049c4a4-dfc2-4968-8ab1-61c344f32e6d was re-scheduled: Binding failed for port 3cd023b5-a6ac-46ca-82da-ac2bb66b923d, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 725.722027] env[62585]: DEBUG nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 725.722027] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Acquiring lock "refresh_cache-0049c4a4-dfc2-4968-8ab1-61c344f32e6d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.722027] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Acquired lock "refresh_cache-0049c4a4-dfc2-4968-8ab1-61c344f32e6d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.722257] env[62585]: DEBUG nova.network.neutron [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 725.876540] env[62585]: DEBUG nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 725.898661] env[62585]: ERROR nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5e91260d-350d-4dba-b3e1-d8e7abfad0a5, please check neutron logs for more information. 
[ 725.898661] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 725.898661] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 725.898661] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 725.898661] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.898661] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 725.898661] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.898661] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 725.898661] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.898661] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 725.898661] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.898661] env[62585]: ERROR nova.compute.manager raise self.value [ 725.898661] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.898661] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 725.898661] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.898661] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 725.899177] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 725.899177] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 725.899177] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5e91260d-350d-4dba-b3e1-d8e7abfad0a5, please check neutron logs for more information. 
[ 725.899177] env[62585]: ERROR nova.compute.manager [ 725.899177] env[62585]: Traceback (most recent call last): [ 725.899177] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 725.899177] env[62585]: listener.cb(fileno) [ 725.899177] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 725.899177] env[62585]: result = function(*args, **kwargs) [ 725.899177] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 725.899177] env[62585]: return func(*args, **kwargs) [ 725.899177] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 725.899177] env[62585]: raise e [ 725.899177] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 725.899177] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 725.899177] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.899177] env[62585]: created_port_ids = self._update_ports_for_instance( [ 725.899177] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.899177] env[62585]: with excutils.save_and_reraise_exception(): [ 725.899177] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.899177] env[62585]: self.force_reraise() [ 725.899177] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.899177] env[62585]: raise self.value [ 725.899177] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.899177] env[62585]: updated_port = self._update_port( [ 725.899177] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.899177] env[62585]: _ensure_no_port_binding_failure(port) [ 725.899177] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 725.899177] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 725.900060] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 5e91260d-350d-4dba-b3e1-d8e7abfad0a5, please check neutron logs for more information. 
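For reference, the check named at the bottom of the traceback above, _ensure_no_port_binding_failure in nova/network/neutron.py, boils down to inspecting the binding result Neutron reports on the updated port. A minimal sketch in Python, not the actual nova source; the 'binding:vif_type' key and the 'binding_failed' value are assumed here from Neutron's portbindings extension:

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port: dict) -> None:
        # Neutron sets binding:vif_type to 'binding_failed' when no mechanism
        # driver could bind the port on the requested host.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # ensure_no_port_binding_failure(
    #     {'id': '5e91260d-350d-4dba-b3e1-d8e7abfad0a5',
    #      'binding:vif_type': 'binding_failed'})
    # raises the exception carrying the message seen in this log.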
[ 725.900060] env[62585]: Removing descriptor: 17 [ 725.904250] env[62585]: DEBUG nova.virt.hardware [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 725.904490] env[62585]: DEBUG nova.virt.hardware [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 725.904674] env[62585]: DEBUG nova.virt.hardware [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 725.904907] env[62585]: DEBUG nova.virt.hardware [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 725.905024] env[62585]: DEBUG nova.virt.hardware [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 725.905179] env[62585]: DEBUG nova.virt.hardware [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 725.905387] env[62585]: DEBUG nova.virt.hardware [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 725.905540] env[62585]: DEBUG nova.virt.hardware [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 725.905702] env[62585]: DEBUG nova.virt.hardware [None 
req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 725.905857] env[62585]: DEBUG nova.virt.hardware [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 725.906033] env[62585]: DEBUG nova.virt.hardware [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 725.906881] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86af963b-5726-473e-94b2-17303e9d4f51 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.915250] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d52787a-ffc9-45f6-ae68-a4e7f504c3aa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.929237] env[62585]: ERROR nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5e91260d-350d-4dba-b3e1-d8e7abfad0a5, please check neutron logs for more information. 
[ 725.929237] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Traceback (most recent call last): [ 725.929237] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 725.929237] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] yield resources [ 725.929237] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 725.929237] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] self.driver.spawn(context, instance, image_meta, [ 725.929237] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 725.929237] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] self._vmops.spawn(context, instance, image_meta, injected_files, [ 725.929237] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 725.929237] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] vm_ref = self.build_virtual_machine(instance, [ 725.929237] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 725.929630] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] vif_infos = vmwarevif.get_vif_info(self._session, [ 725.929630] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 725.929630] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] for vif in network_info: [ 725.929630] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 725.929630] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] return self._sync_wrapper(fn, *args, **kwargs) [ 725.929630] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 725.929630] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] self.wait() [ 725.929630] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 725.929630] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] self[:] = self._gt.wait() [ 725.929630] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 725.929630] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] return self._exit_event.wait() [ 725.929630] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 725.929630] env[62585]: ERROR 
nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] current.throw(*self._exc) [ 725.929994] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 725.929994] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] result = function(*args, **kwargs) [ 725.929994] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 725.929994] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] return func(*args, **kwargs) [ 725.929994] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 725.929994] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] raise e [ 725.929994] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 725.929994] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] nwinfo = self.network_api.allocate_for_instance( [ 725.929994] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.929994] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] created_port_ids = self._update_ports_for_instance( [ 725.929994] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.929994] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] with excutils.save_and_reraise_exception(): [ 725.929994] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.930387] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] self.force_reraise() [ 725.930387] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.930387] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] raise self.value [ 725.930387] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.930387] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] updated_port = self._update_port( [ 725.930387] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.930387] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] _ensure_no_port_binding_failure(port) [ 725.930387] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
725.930387] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] raise exception.PortBindingFailed(port_id=port['id']) [ 725.930387] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] nova.exception.PortBindingFailed: Binding failed for port 5e91260d-350d-4dba-b3e1-d8e7abfad0a5, please check neutron logs for more information. [ 725.930387] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] [ 725.930387] env[62585]: INFO nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Terminating instance [ 725.931423] env[62585]: DEBUG oslo_concurrency.lockutils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "refresh_cache-7504c221-2d27-4dc6-9100-9a2dca2a6036" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.168805] env[62585]: DEBUG nova.network.neutron [req-ac81cab7-4c48-40d1-9166-2b263bf3d028 req-3d054704-b364-4db3-ae53-749cd553f238 service nova] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.255058] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.258910] env[62585]: DEBUG nova.network.neutron [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.307992] env[62585]: DEBUG nova.network.neutron [req-ac81cab7-4c48-40d1-9166-2b263bf3d028 req-3d054704-b364-4db3-ae53-749cd553f238 service nova] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.345141] env[62585]: DEBUG nova.network.neutron [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.812635] env[62585]: DEBUG oslo_concurrency.lockutils [req-ac81cab7-4c48-40d1-9166-2b263bf3d028 req-3d054704-b364-4db3-ae53-749cd553f238 service nova] Releasing lock "refresh_cache-7504c221-2d27-4dc6-9100-9a2dca2a6036" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.812946] env[62585]: DEBUG oslo_concurrency.lockutils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock "refresh_cache-7504c221-2d27-4dc6-9100-9a2dca2a6036" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.813503] env[62585]: DEBUG nova.network.neutron [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 726.852602] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Releasing lock "refresh_cache-0049c4a4-dfc2-4968-8ab1-61c344f32e6d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.852859] env[62585]: DEBUG nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 726.853049] env[62585]: DEBUG nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 726.853240] env[62585]: DEBUG nova.network.neutron [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 726.879110] env[62585]: DEBUG nova.network.neutron [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.980883] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27381737-a1eb-4ea3-82e4-25dc1628e17b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.988248] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889e7541-7d32-4838-a327-3813976692a4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.016543] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0fe2d55-e1ee-4297-9835-c9bc4b7e313d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.024059] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6477850-3edd-434b-ba1d-1b2f6981be60 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.035970] env[62585]: DEBUG nova.compute.provider_tree [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.330039] env[62585]: DEBUG nova.network.neutron [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.382027] env[62585]: DEBUG nova.network.neutron [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.414573] env[62585]: DEBUG nova.network.neutron [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.450753] env[62585]: DEBUG oslo_concurrency.lockutils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "aed35d7d-f826-4601-aa4e-1d1dccd51d3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.450940] env[62585]: DEBUG oslo_concurrency.lockutils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "aed35d7d-f826-4601-aa4e-1d1dccd51d3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.539390] env[62585]: DEBUG nova.scheduler.client.report [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 727.674277] env[62585]: DEBUG nova.compute.manager [req-8290ae33-cfb0-44ec-af8c-96444f9f733b req-467214ee-c154-4882-80f9-b43afa4d13df service nova] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Received event network-vif-deleted-5e91260d-350d-4dba-b3e1-d8e7abfad0a5 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 727.884509] env[62585]: INFO nova.compute.manager [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] [instance: 0049c4a4-dfc2-4968-8ab1-61c344f32e6d] Took 1.03 seconds to deallocate network for instance. 
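The inventory payload logged above for provider 66db9ec1-b5c3-45d2-a885-8e338110656b is easier to read once the placement arithmetic is spelled out: the capacity placement will hand out per resource class is effectively (total - reserved) * allocation_ratio, consumed in multiples of step_size, with max_unit capping any single allocation. A quick sketch using the exact numbers from the log:

    # Interpret the inventory dict reported by the resource tracker.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 176},
    }

    for rc, inv in inventory.items():
        schedulable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {schedulable:.0f} schedulable, "
              f"at most {inv['max_unit']} per allocation")

    # VCPU: 192 schedulable, at most 16 per allocation
    # MEMORY_MB: 196078 schedulable, at most 65530 per allocation
    # DISK_GB: 400 schedulable, at most 176 per allocation

So this node advertises 192 schedulable vCPUs (48 physical at a 4.0 overcommit ratio), roughly 191 GiB of RAM after the 512 MiB reservation, and 400 GiB of disk.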
[ 727.916977] env[62585]: DEBUG oslo_concurrency.lockutils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "refresh_cache-7504c221-2d27-4dc6-9100-9a2dca2a6036" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.917480] env[62585]: DEBUG nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 727.917683] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 727.918959] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b7c95d5b-97c6-4c83-bdf0-bdce06abffc7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.928253] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4cad0c7-22f4-4e07-a78b-c91ba0845827 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.950866] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7504c221-2d27-4dc6-9100-9a2dca2a6036 could not be found. [ 727.951107] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 727.951289] env[62585]: INFO nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Took 0.03 seconds to destroy the instance on the hypervisor. [ 727.951525] env[62585]: DEBUG oslo.service.loopingcall [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 727.951744] env[62585]: DEBUG nova.compute.manager [-] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 727.951835] env[62585]: DEBUG nova.network.neutron [-] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 727.966280] env[62585]: DEBUG nova.network.neutron [-] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 728.044226] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.332s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.045062] env[62585]: DEBUG nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 728.047925] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.283s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.049333] env[62585]: INFO nova.compute.claims [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 728.469196] env[62585]: DEBUG nova.network.neutron [-] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.554402] env[62585]: DEBUG nova.compute.utils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 728.557610] env[62585]: DEBUG nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 728.557782] env[62585]: DEBUG nova.network.neutron [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 728.619016] env[62585]: DEBUG nova.policy [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5b4bb7b573a4a06ae04dc981250878f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8ce90d795bc74d71a8ed867e3a8cf903', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 728.914082] env[62585]: DEBUG nova.network.neutron [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Successfully created port: 4f7145a0-f972-474b-912b-03da34495d70 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 728.922123] env[62585]: INFO nova.scheduler.client.report [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Deleted allocations for instance 0049c4a4-dfc2-4968-8ab1-61c344f32e6d [ 728.971772] env[62585]: INFO nova.compute.manager [-] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Took 1.02 seconds to deallocate network for instance. [ 728.975022] env[62585]: DEBUG nova.compute.claims [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 728.975022] env[62585]: DEBUG oslo_concurrency.lockutils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.059019] env[62585]: DEBUG nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 729.365460] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce7a37f-95fe-4a97-ba4b-2797e4a5e194 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.373909] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88647cb1-b117-4d78-96ae-6534575285cb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.405097] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8cc848-a05d-4319-ba19-66153fedaa22 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.412783] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b640b19b-4fdd-4ba8-ba8a-dcf2d5c6d3eb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.426206] env[62585]: DEBUG nova.compute.provider_tree [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.430378] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e801e7ba-76b3-4207-88d6-20282d8043c8 tempest-AttachInterfacesUnderV243Test-2110182679 tempest-AttachInterfacesUnderV243Test-2110182679-project-member] Lock "0049c4a4-dfc2-4968-8ab1-61c344f32e6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.356s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.684206] env[62585]: DEBUG nova.compute.manager [req-39ae0ab3-0e09-4728-9df4-7c1ce5da69d1 req-10f65591-b46a-4e4f-a4ec-5136bcf9edcf service nova] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Received event network-changed-4f7145a0-f972-474b-912b-03da34495d70 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 729.684206] env[62585]: DEBUG nova.compute.manager [req-39ae0ab3-0e09-4728-9df4-7c1ce5da69d1 req-10f65591-b46a-4e4f-a4ec-5136bcf9edcf service nova] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Refreshing instance network info cache due to event network-changed-4f7145a0-f972-474b-912b-03da34495d70. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 729.684377] env[62585]: DEBUG oslo_concurrency.lockutils [req-39ae0ab3-0e09-4728-9df4-7c1ce5da69d1 req-10f65591-b46a-4e4f-a4ec-5136bcf9edcf service nova] Acquiring lock "refresh_cache-a0512ab3-1248-4f38-8ed9-249ba5a2d488" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.684628] env[62585]: DEBUG oslo_concurrency.lockutils [req-39ae0ab3-0e09-4728-9df4-7c1ce5da69d1 req-10f65591-b46a-4e4f-a4ec-5136bcf9edcf service nova] Acquired lock "refresh_cache-a0512ab3-1248-4f38-8ed9-249ba5a2d488" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.684857] env[62585]: DEBUG nova.network.neutron [req-39ae0ab3-0e09-4728-9df4-7c1ce5da69d1 req-10f65591-b46a-4e4f-a4ec-5136bcf9edcf service nova] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Refreshing network info cache for port 4f7145a0-f972-474b-912b-03da34495d70 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 729.891190] env[62585]: ERROR nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4f7145a0-f972-474b-912b-03da34495d70, please check neutron logs for more information. [ 729.891190] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 729.891190] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 729.891190] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 729.891190] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 729.891190] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 729.891190] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 729.891190] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 729.891190] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.891190] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 729.891190] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.891190] env[62585]: ERROR nova.compute.manager raise self.value [ 729.891190] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 729.891190] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 729.891190] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.891190] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 729.893180] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 729.893180] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 729.893180] env[62585]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 4f7145a0-f972-474b-912b-03da34495d70, please check neutron logs for more information. [ 729.893180] env[62585]: ERROR nova.compute.manager [ 729.893180] env[62585]: Traceback (most recent call last): [ 729.893180] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 729.893180] env[62585]: listener.cb(fileno) [ 729.893180] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 729.893180] env[62585]: result = function(*args, **kwargs) [ 729.893180] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 729.893180] env[62585]: return func(*args, **kwargs) [ 729.893180] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 729.893180] env[62585]: raise e [ 729.893180] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 729.893180] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 729.893180] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 729.893180] env[62585]: created_port_ids = self._update_ports_for_instance( [ 729.893180] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 729.893180] env[62585]: with excutils.save_and_reraise_exception(): [ 729.893180] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.893180] env[62585]: self.force_reraise() [ 729.893180] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.893180] env[62585]: raise self.value [ 729.893180] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 729.893180] env[62585]: updated_port = self._update_port( [ 729.893180] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.893180] env[62585]: _ensure_no_port_binding_failure(port) [ 729.893180] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 729.893180] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 729.894075] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 4f7145a0-f972-474b-912b-03da34495d70, please check neutron logs for more information. 
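A note on why the same PortBindingFailed for port 4f7145a0-f972-474b-912b-03da34495d70 appears twice, first here from _allocate_network_async and again in the "Instance failed to spawn" traceback that follows: port allocation runs in a background greenthread that logs the failure and re-raises it, and the stored exception surfaces a second time when the VMware driver first iterates the network_info while building VIF definitions. A rough analogy in plain Python, with concurrent.futures standing in for eventlet; this is not nova's actual code:

    from concurrent.futures import ThreadPoolExecutor

    class PortBindingFailed(Exception):
        pass

    def allocate_network_async(port_id):
        try:
            raise PortBindingFailed(f"Binding failed for port {port_id}")
        except PortBindingFailed as exc:
            print("ERROR (background allocation):", exc)  # first log entry
            raise                                         # stored in the future

    with ThreadPoolExecutor(max_workers=1) as pool:
        network_info = pool.submit(
            allocate_network_async, "4f7145a0-f972-474b-912b-03da34495d70")
        try:
            # The driver only touches network_info when it builds the VIF list,
            # which is where the stored exception is re-raised.
            for vif in network_info.result():
                pass
        except PortBindingFailed as exc:
            print("ERROR (driver.spawn):", exc)           # second log entry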
[ 729.894075] env[62585]: Removing descriptor: 15 [ 729.929759] env[62585]: DEBUG nova.scheduler.client.report [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 729.936349] env[62585]: DEBUG nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 730.069908] env[62585]: DEBUG nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 730.093900] env[62585]: DEBUG nova.virt.hardware [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 730.094150] env[62585]: DEBUG nova.virt.hardware [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 730.094307] env[62585]: DEBUG nova.virt.hardware [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 730.094483] env[62585]: DEBUG nova.virt.hardware [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 730.094626] env[62585]: DEBUG nova.virt.hardware [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 730.094768] env[62585]: DEBUG nova.virt.hardware [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 730.094967] env[62585]: DEBUG nova.virt.hardware [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 730.095254] env[62585]: DEBUG nova.virt.hardware [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 730.095438] env[62585]: DEBUG nova.virt.hardware [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 730.095598] env[62585]: DEBUG nova.virt.hardware [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 730.095765] env[62585]: DEBUG nova.virt.hardware [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 730.096620] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6293489f-55e7-49d6-ae6d-ad5e04aade3c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.106222] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e30bdf-42c2-43ad-9afe-4ab52a5aba8b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.119788] env[62585]: ERROR nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4f7145a0-f972-474b-912b-03da34495d70, please check neutron logs for more information. 
[ 730.119788] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Traceback (most recent call last): [ 730.119788] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 730.119788] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] yield resources [ 730.119788] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 730.119788] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] self.driver.spawn(context, instance, image_meta, [ 730.119788] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 730.119788] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] self._vmops.spawn(context, instance, image_meta, injected_files, [ 730.119788] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 730.119788] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] vm_ref = self.build_virtual_machine(instance, [ 730.119788] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 730.120511] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] vif_infos = vmwarevif.get_vif_info(self._session, [ 730.120511] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 730.120511] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] for vif in network_info: [ 730.120511] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 730.120511] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] return self._sync_wrapper(fn, *args, **kwargs) [ 730.120511] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 730.120511] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] self.wait() [ 730.120511] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 730.120511] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] self[:] = self._gt.wait() [ 730.120511] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 730.120511] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] return self._exit_event.wait() [ 730.120511] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 730.120511] env[62585]: ERROR 
nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] current.throw(*self._exc) [ 730.121120] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 730.121120] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] result = function(*args, **kwargs) [ 730.121120] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 730.121120] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] return func(*args, **kwargs) [ 730.121120] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 730.121120] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] raise e [ 730.121120] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 730.121120] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] nwinfo = self.network_api.allocate_for_instance( [ 730.121120] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 730.121120] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] created_port_ids = self._update_ports_for_instance( [ 730.121120] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 730.121120] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] with excutils.save_and_reraise_exception(): [ 730.121120] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 730.121720] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] self.force_reraise() [ 730.121720] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 730.121720] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] raise self.value [ 730.121720] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 730.121720] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] updated_port = self._update_port( [ 730.121720] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 730.121720] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] _ensure_no_port_binding_failure(port) [ 730.121720] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
730.121720] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] raise exception.PortBindingFailed(port_id=port['id']) [ 730.121720] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] nova.exception.PortBindingFailed: Binding failed for port 4f7145a0-f972-474b-912b-03da34495d70, please check neutron logs for more information. [ 730.121720] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] [ 730.121720] env[62585]: INFO nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Terminating instance [ 730.122287] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Acquiring lock "refresh_cache-a0512ab3-1248-4f38-8ed9-249ba5a2d488" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.206702] env[62585]: DEBUG nova.network.neutron [req-39ae0ab3-0e09-4728-9df4-7c1ce5da69d1 req-10f65591-b46a-4e4f-a4ec-5136bcf9edcf service nova] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.351998] env[62585]: DEBUG nova.network.neutron [req-39ae0ab3-0e09-4728-9df4-7c1ce5da69d1 req-10f65591-b46a-4e4f-a4ec-5136bcf9edcf service nova] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.439050] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.442270] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.757s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.443649] env[62585]: INFO nova.compute.claims [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 730.461351] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.855015] env[62585]: DEBUG oslo_concurrency.lockutils [req-39ae0ab3-0e09-4728-9df4-7c1ce5da69d1 
req-10f65591-b46a-4e4f-a4ec-5136bcf9edcf service nova] Releasing lock "refresh_cache-a0512ab3-1248-4f38-8ed9-249ba5a2d488" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.855476] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Acquired lock "refresh_cache-a0512ab3-1248-4f38-8ed9-249ba5a2d488" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.855660] env[62585]: DEBUG nova.network.neutron [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 730.943921] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Acquiring lock "6acbfc64-354d-4b24-9007-2d6d79c2d157" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.944282] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Lock "6acbfc64-354d-4b24-9007-2d6d79c2d157" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.382480] env[62585]: DEBUG nova.network.neutron [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.447963] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Lock "6acbfc64-354d-4b24-9007-2d6d79c2d157" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: held 0.504s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.448637] env[62585]: DEBUG nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 731.509810] env[62585]: DEBUG nova.network.neutron [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.696801] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6be156-f3a0-4140-8b62-2339dd5d76d9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.703945] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0cdd45-b35b-48c0-9b14-568d829d98e4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.709374] env[62585]: DEBUG nova.compute.manager [req-ad7228e0-7e70-4935-aceb-0ddb9f7eb828 req-793c92a1-c55b-4ee8-b9ae-d919f9c4a36c service nova] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Received event network-vif-deleted-4f7145a0-f972-474b-912b-03da34495d70 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 731.734417] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19eab45a-0173-47bd-9314-586cd768be1a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.742875] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f59f705-c0d5-45d7-b3f2-deb1b9200188 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.756430] env[62585]: DEBUG nova.compute.provider_tree [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.956038] env[62585]: DEBUG nova.compute.utils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 731.957542] env[62585]: DEBUG nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 731.957542] env[62585]: DEBUG nova.network.neutron [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 732.004793] env[62585]: DEBUG nova.policy [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '684e738ee1ea4ceca8b083e6fe110070', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e9a83eacbb248198de8efc02ed9b7a6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 732.014045] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Releasing lock "refresh_cache-a0512ab3-1248-4f38-8ed9-249ba5a2d488" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.014473] env[62585]: DEBUG nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 732.014663] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 732.014961] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e1ff2c01-67a6-4d71-8d16-ba399cb7e8d6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.024707] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3e259c-4089-45ae-ada0-cf7f1abead07 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.046078] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a0512ab3-1248-4f38-8ed9-249ba5a2d488 could not be found. 
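Editor's note: the WARNING above shows the VMware driver finding no backend VM for the instance it is tearing down after the failed port binding; the lines that follow treat this as a successful destroy and move straight on to network deallocation. Below is a minimal sketch of that tolerance pattern. Only the InstanceNotFound exception name is taken from the log; find_vm_ref() and unregister_vm() are hypothetical helpers, not Nova's actual destroy code.

```python
# Minimal sketch: a destroy path that tolerates a VM missing on the
# backend, mirroring the WARNING -> "Instance destroyed" sequence above.
# find_vm_ref() and unregister_vm() are hypothetical helpers.
import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy_instance(instance_uuid, find_vm_ref, unregister_vm):
    try:
        vm_ref = find_vm_ref(instance_uuid)  # raises InstanceNotFound if absent
        unregister_vm(vm_ref)
    except InstanceNotFound:
        # The spawn failed before a VM was ever created on the hypervisor,
        # so there is nothing to remove; treat it as already destroyed.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    LOG.debug("Instance destroyed")
```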
[ 732.046348] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 732.046546] env[62585]: INFO nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Took 0.03 seconds to destroy the instance on the hypervisor. [ 732.046822] env[62585]: DEBUG oslo.service.loopingcall [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 732.047088] env[62585]: DEBUG nova.compute.manager [-] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 732.047179] env[62585]: DEBUG nova.network.neutron [-] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 732.065025] env[62585]: DEBUG nova.network.neutron [-] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 732.259012] env[62585]: DEBUG nova.scheduler.client.report [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 732.460784] env[62585]: DEBUG nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 732.547765] env[62585]: DEBUG nova.network.neutron [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Successfully created port: 012c7730-176b-4987-8e0f-ba1c9678fb48 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 732.567667] env[62585]: DEBUG nova.network.neutron [-] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.763835] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.321s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.764249] env[62585]: DEBUG nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 732.766840] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.021s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.768270] env[62585]: INFO nova.compute.claims [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 733.070051] env[62585]: INFO nova.compute.manager [-] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Took 1.02 seconds to deallocate network for instance. 
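Editor's note: the repeated "Inventory has not changed for provider 66db9ec1-..." lines in this section come from the report client comparing the freshly computed inventory against what it last sent to Placement and skipping the update when the two match. Below is a rough illustration of that comparison using the resource classes from the log; the module-level cache and the push_to_placement callback are assumptions for illustration, not Nova's report-client internals.

```python
# Rough illustration of the "Inventory has not changed" check: only push
# inventory to Placement when it differs from the last copy sent. The
# cache dict and push_to_placement callback are illustrative stand-ins.
_provider_inventory_cache = {}


def set_inventory_for_provider(provider_uuid, inventory, push_to_placement):
    if _provider_inventory_cache.get(provider_uuid) == inventory:
        print("Inventory has not changed for provider %s" % provider_uuid)
        return False
    push_to_placement(provider_uuid, inventory)
    _provider_inventory_cache[provider_uuid] = inventory
    return True


# Resource classes as reported in the log (abridged):
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}
set_inventory_for_provider("66db9ec1-b5c3-45d2-a885-8e338110656b",
                           inventory, lambda p, inv: None)
```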
[ 733.072428] env[62585]: DEBUG nova.compute.claims [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 733.072644] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.272416] env[62585]: DEBUG nova.compute.utils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 733.278569] env[62585]: DEBUG nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 733.278834] env[62585]: DEBUG nova.network.neutron [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 733.318684] env[62585]: DEBUG nova.policy [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3143e277496f4645aaae13efe92d5c41', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecbd6f90e8ee48d29f20d36bf5ba1140', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 733.445889] env[62585]: ERROR nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 012c7730-176b-4987-8e0f-ba1c9678fb48, please check neutron logs for more information. 
[ 733.445889] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 733.445889] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 733.445889] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 733.445889] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 733.445889] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 733.445889] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 733.445889] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 733.445889] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.445889] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 733.445889] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.445889] env[62585]: ERROR nova.compute.manager raise self.value [ 733.445889] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 733.445889] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 733.445889] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.445889] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 733.446484] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 733.446484] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 733.446484] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 012c7730-176b-4987-8e0f-ba1c9678fb48, please check neutron logs for more information. 
[ 733.446484] env[62585]: ERROR nova.compute.manager [ 733.446484] env[62585]: Traceback (most recent call last): [ 733.446484] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 733.446484] env[62585]: listener.cb(fileno) [ 733.446484] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 733.446484] env[62585]: result = function(*args, **kwargs) [ 733.446484] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 733.446484] env[62585]: return func(*args, **kwargs) [ 733.446484] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 733.446484] env[62585]: raise e [ 733.446484] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 733.446484] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 733.446484] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 733.446484] env[62585]: created_port_ids = self._update_ports_for_instance( [ 733.446484] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 733.446484] env[62585]: with excutils.save_and_reraise_exception(): [ 733.446484] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.446484] env[62585]: self.force_reraise() [ 733.446484] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.446484] env[62585]: raise self.value [ 733.446484] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 733.446484] env[62585]: updated_port = self._update_port( [ 733.446484] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.446484] env[62585]: _ensure_no_port_binding_failure(port) [ 733.446484] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 733.446484] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 733.447605] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 012c7730-176b-4987-8e0f-ba1c9678fb48, please check neutron logs for more information. [ 733.447605] env[62585]: Removing descriptor: 15 [ 733.472565] env[62585]: DEBUG nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 733.497993] env[62585]: DEBUG nova.virt.hardware [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 733.498255] env[62585]: DEBUG nova.virt.hardware [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 733.498408] env[62585]: DEBUG nova.virt.hardware [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 733.498595] env[62585]: DEBUG nova.virt.hardware [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 733.498741] env[62585]: DEBUG nova.virt.hardware [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 733.498884] env[62585]: DEBUG nova.virt.hardware [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 733.499170] env[62585]: DEBUG nova.virt.hardware [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 733.499399] env[62585]: DEBUG nova.virt.hardware [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 733.499583] env[62585]: DEBUG nova.virt.hardware [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 
tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 733.499744] env[62585]: DEBUG nova.virt.hardware [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 733.499911] env[62585]: DEBUG nova.virt.hardware [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 733.500764] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9dff0b2-4e51-473e-8665-cd80674c9e8a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.508984] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56fb02e5-980e-453d-a511-6bfdb8b827a9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.523085] env[62585]: ERROR nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 012c7730-176b-4987-8e0f-ba1c9678fb48, please check neutron logs for more information. 
[ 733.523085] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Traceback (most recent call last): [ 733.523085] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 733.523085] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] yield resources [ 733.523085] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 733.523085] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] self.driver.spawn(context, instance, image_meta, [ 733.523085] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 733.523085] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 733.523085] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 733.523085] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] vm_ref = self.build_virtual_machine(instance, [ 733.523085] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 733.523530] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] vif_infos = vmwarevif.get_vif_info(self._session, [ 733.523530] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 733.523530] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] for vif in network_info: [ 733.523530] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 733.523530] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] return self._sync_wrapper(fn, *args, **kwargs) [ 733.523530] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 733.523530] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] self.wait() [ 733.523530] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 733.523530] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] self[:] = self._gt.wait() [ 733.523530] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 733.523530] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] return self._exit_event.wait() [ 733.523530] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 733.523530] env[62585]: ERROR 
nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] current.throw(*self._exc) [ 733.523953] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 733.523953] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] result = function(*args, **kwargs) [ 733.523953] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 733.523953] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] return func(*args, **kwargs) [ 733.523953] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 733.523953] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] raise e [ 733.523953] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 733.523953] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] nwinfo = self.network_api.allocate_for_instance( [ 733.523953] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 733.523953] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] created_port_ids = self._update_ports_for_instance( [ 733.523953] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 733.523953] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] with excutils.save_and_reraise_exception(): [ 733.523953] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.524374] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] self.force_reraise() [ 733.524374] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.524374] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] raise self.value [ 733.524374] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 733.524374] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] updated_port = self._update_port( [ 733.524374] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.524374] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] _ensure_no_port_binding_failure(port) [ 733.524374] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
733.524374] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] raise exception.PortBindingFailed(port_id=port['id']) [ 733.524374] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] nova.exception.PortBindingFailed: Binding failed for port 012c7730-176b-4987-8e0f-ba1c9678fb48, please check neutron logs for more information. [ 733.524374] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] [ 733.524374] env[62585]: INFO nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Terminating instance [ 733.526192] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Acquiring lock "refresh_cache-d536e668-d597-4f8e-8d61-974e072b48c8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.526192] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Acquired lock "refresh_cache-d536e668-d597-4f8e-8d61-974e072b48c8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.526192] env[62585]: DEBUG nova.network.neutron [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 733.616717] env[62585]: DEBUG nova.network.neutron [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Successfully created port: eb8156a4-acac-4527-b0b7-6945e4b585b2 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 733.735906] env[62585]: DEBUG nova.compute.manager [req-2ef2bd7d-d0be-444a-b061-6ce20f1c510f req-56e1da9e-5f89-41d8-9651-528496fbdbc2 service nova] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Received event network-changed-012c7730-176b-4987-8e0f-ba1c9678fb48 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 733.736123] env[62585]: DEBUG nova.compute.manager [req-2ef2bd7d-d0be-444a-b061-6ce20f1c510f req-56e1da9e-5f89-41d8-9651-528496fbdbc2 service nova] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Refreshing instance network info cache due to event network-changed-012c7730-176b-4987-8e0f-ba1c9678fb48. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 733.736314] env[62585]: DEBUG oslo_concurrency.lockutils [req-2ef2bd7d-d0be-444a-b061-6ce20f1c510f req-56e1da9e-5f89-41d8-9651-528496fbdbc2 service nova] Acquiring lock "refresh_cache-d536e668-d597-4f8e-8d61-974e072b48c8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.779835] env[62585]: DEBUG nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 734.035435] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac378ab-c774-4cc7-9095-931398c35145 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.043354] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e61ea7-5dce-4d8b-bdf1-305fb93d53b4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.046682] env[62585]: DEBUG nova.network.neutron [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.075393] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3a5d70-7561-4b6a-af20-901ff45eb1f4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.082407] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b46c87-b6a2-4708-ac29-f7a6b618735c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.095806] env[62585]: DEBUG nova.compute.provider_tree [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.138522] env[62585]: DEBUG nova.network.neutron [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.286952] env[62585]: INFO nova.virt.block_device [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Booting with volume eede5dcd-43bb-4621-8d96-f9f9f103294a at /dev/sda [ 734.332837] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8ca2e03b-528b-40f3-bba3-69990402aba3 {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.341928] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6468a90-f39d-4fb2-8ad7-0ced24a9777f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.362748] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e8f3694b-78de-492b-abdd-9f06cf349f6f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.370110] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8027eb8-e657-4ae6-94ee-447f46526fab {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.390806] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07041ef6-aeb2-4f20-92b3-5cf0253ac536 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.400639] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317965fa-440c-4b44-9e63-ee469fa8b42d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.410159] env[62585]: DEBUG nova.virt.block_device [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Updating existing volume attachment record: 2b577bd6-ae19-44b8-9a0b-5d2b8052263d {{(pid=62585) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 734.432161] env[62585]: DEBUG nova.compute.manager [req-636958d1-d74f-48f9-b001-992326bc216d req-e4576432-7317-4873-97d5-b4876e6b2ccd service nova] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Received event network-changed-eb8156a4-acac-4527-b0b7-6945e4b585b2 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 734.432161] env[62585]: DEBUG nova.compute.manager [req-636958d1-d74f-48f9-b001-992326bc216d req-e4576432-7317-4873-97d5-b4876e6b2ccd service nova] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Refreshing instance network info cache due to event network-changed-eb8156a4-acac-4527-b0b7-6945e4b585b2. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 734.432321] env[62585]: DEBUG oslo_concurrency.lockutils [req-636958d1-d74f-48f9-b001-992326bc216d req-e4576432-7317-4873-97d5-b4876e6b2ccd service nova] Acquiring lock "refresh_cache-f03bdd4b-e75e-4d70-84b3-126d2296994f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.432457] env[62585]: DEBUG oslo_concurrency.lockutils [req-636958d1-d74f-48f9-b001-992326bc216d req-e4576432-7317-4873-97d5-b4876e6b2ccd service nova] Acquired lock "refresh_cache-f03bdd4b-e75e-4d70-84b3-126d2296994f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.432620] env[62585]: DEBUG nova.network.neutron [req-636958d1-d74f-48f9-b001-992326bc216d req-e4576432-7317-4873-97d5-b4876e6b2ccd service nova] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Refreshing network info cache for port eb8156a4-acac-4527-b0b7-6945e4b585b2 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 734.595422] env[62585]: ERROR nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port eb8156a4-acac-4527-b0b7-6945e4b585b2, please check neutron logs for more information. [ 734.595422] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 734.595422] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 734.595422] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 734.595422] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 734.595422] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 734.595422] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 734.595422] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 734.595422] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 734.595422] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 734.595422] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 734.595422] env[62585]: ERROR nova.compute.manager raise self.value [ 734.595422] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 734.595422] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 734.595422] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 734.595422] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 734.595978] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 734.595978] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 734.595978] env[62585]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port eb8156a4-acac-4527-b0b7-6945e4b585b2, please check neutron logs for more information. [ 734.595978] env[62585]: ERROR nova.compute.manager [ 734.595978] env[62585]: Traceback (most recent call last): [ 734.595978] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 734.595978] env[62585]: listener.cb(fileno) [ 734.595978] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 734.595978] env[62585]: result = function(*args, **kwargs) [ 734.595978] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 734.595978] env[62585]: return func(*args, **kwargs) [ 734.595978] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 734.595978] env[62585]: raise e [ 734.595978] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 734.595978] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 734.595978] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 734.595978] env[62585]: created_port_ids = self._update_ports_for_instance( [ 734.595978] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 734.595978] env[62585]: with excutils.save_and_reraise_exception(): [ 734.595978] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 734.595978] env[62585]: self.force_reraise() [ 734.595978] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 734.595978] env[62585]: raise self.value [ 734.595978] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 734.595978] env[62585]: updated_port = self._update_port( [ 734.595978] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 734.595978] env[62585]: _ensure_no_port_binding_failure(port) [ 734.595978] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 734.595978] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 734.596865] env[62585]: nova.exception.PortBindingFailed: Binding failed for port eb8156a4-acac-4527-b0b7-6945e4b585b2, please check neutron logs for more information. 
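[editor's note] The traceback above ends in _ensure_no_port_binding_failure raising PortBindingFailed inside a save_and_reraise_exception block (the __exit__/force_reraise frames). A minimal sketch of that error-propagation pattern follows; it is not Nova's actual implementation, the binding:vif_type check and the local PortBindingFailed class are assumptions for illustration, and only excutils.save_and_reraise_exception is the real oslo.utils API.

    # Sketch of the failure path shown in the traceback above (simplified).
    from oslo_utils import excutils


    class PortBindingFailed(Exception):
        """Illustrative stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)


    def _ensure_no_port_binding_failure(port):
        # Assumption: Neutron marks a failed binding in 'binding:vif_type';
        # the log only shows that PortBindingFailed(port_id=...) is raised.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    def _update_ports_for_instance(ports):
        created = []
        for port in ports:
            try:
                _ensure_no_port_binding_failure(port)
                created.append(port['id'])
            except Exception:
                # Inside the except block, save_and_reraise_exception runs any
                # cleanup and then re-raises the original error; this is the
                # __exit__ -> force_reraise -> raise self.value chain above.
                with excutils.save_and_reraise_exception():
                    pass  # cleanup of already-updated ports would go here
        return created
[end editor's note]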
[ 734.596865] env[62585]: Removing descriptor: 17 [ 734.598701] env[62585]: DEBUG nova.scheduler.client.report [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 734.642864] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Releasing lock "refresh_cache-d536e668-d597-4f8e-8d61-974e072b48c8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.643339] env[62585]: DEBUG nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 734.643536] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 734.643839] env[62585]: DEBUG oslo_concurrency.lockutils [req-2ef2bd7d-d0be-444a-b061-6ce20f1c510f req-56e1da9e-5f89-41d8-9651-528496fbdbc2 service nova] Acquired lock "refresh_cache-d536e668-d597-4f8e-8d61-974e072b48c8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.644014] env[62585]: DEBUG nova.network.neutron [req-2ef2bd7d-d0be-444a-b061-6ce20f1c510f req-56e1da9e-5f89-41d8-9651-528496fbdbc2 service nova] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Refreshing network info cache for port 012c7730-176b-4987-8e0f-ba1c9678fb48 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 734.645113] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f825b576-742a-42e2-bc0b-6ab201de5ba5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.657266] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635d73a4-84d0-417c-a5e0-8c40164b61ed {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.679046] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d536e668-d597-4f8e-8d61-974e072b48c8 could not be found. 
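[editor's note] The entries around the WARNING above show the destroy path tolerating an instance that is already gone from the backend and then continuing with network deallocation. A hedged sketch of that control flow follows; the function and class names are illustrative only and do not correspond to Nova's real API.

    # Simplified sketch of the logged destroy flow: InstanceNotFound from the
    # hypervisor layer is logged as a warning, and network cleanup still runs.
    import logging

    LOG = logging.getLogger(__name__)


    class InstanceNotFound(Exception):
        """Illustrative stand-in for nova.exception.InstanceNotFound."""


    def shutdown_instance(instance_uuid, backend, network_api):
        try:
            backend.destroy(instance_uuid)
        except InstanceNotFound:
            # Matches the WARNING above: missing on the backend is not fatal.
            LOG.warning("Instance does not exist on backend: %s", instance_uuid)
        LOG.info("Instance destroyed")
        # Network resources are released regardless of whether the VM existed.
        network_api.deallocate_for_instance(instance_uuid)
[end editor's note]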
[ 734.679267] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 734.679469] env[62585]: INFO nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 734.679713] env[62585]: DEBUG oslo.service.loopingcall [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 734.679928] env[62585]: DEBUG nova.compute.manager [-] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 734.680031] env[62585]: DEBUG nova.network.neutron [-] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 734.694318] env[62585]: DEBUG nova.network.neutron [-] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.956454] env[62585]: DEBUG nova.network.neutron [req-636958d1-d74f-48f9-b001-992326bc216d req-e4576432-7317-4873-97d5-b4876e6b2ccd service nova] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.025694] env[62585]: DEBUG nova.network.neutron [req-636958d1-d74f-48f9-b001-992326bc216d req-e4576432-7317-4873-97d5-b4876e6b2ccd service nova] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.102998] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.336s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.103543] env[62585]: DEBUG nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 735.106064] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.490s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.106247] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.106398] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62585) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 735.106682] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.556s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.113024] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b8fed8-7f99-44fe-9e3f-50c60987502e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.119934] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4bdae2-e36d-429f-afff-f4187500c4c1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.133911] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5b729a-5ff1-4302-8848-7956d2f46150 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.141523] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6bdbc6e-9ebf-4263-8eae-2593a477a86c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.175900] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181049MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=62585) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 735.175900] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.193066] env[62585]: DEBUG nova.network.neutron [req-2ef2bd7d-d0be-444a-b061-6ce20f1c510f req-56e1da9e-5f89-41d8-9651-528496fbdbc2 service nova] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Instance cache 
missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.196448] env[62585]: DEBUG nova.network.neutron [-] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.276549] env[62585]: DEBUG nova.network.neutron [req-2ef2bd7d-d0be-444a-b061-6ce20f1c510f req-56e1da9e-5f89-41d8-9651-528496fbdbc2 service nova] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.527838] env[62585]: DEBUG oslo_concurrency.lockutils [req-636958d1-d74f-48f9-b001-992326bc216d req-e4576432-7317-4873-97d5-b4876e6b2ccd service nova] Releasing lock "refresh_cache-f03bdd4b-e75e-4d70-84b3-126d2296994f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.611393] env[62585]: DEBUG nova.compute.utils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 735.612802] env[62585]: DEBUG nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 735.612969] env[62585]: DEBUG nova.network.neutron [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 735.673044] env[62585]: DEBUG nova.policy [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f386a62b6a4b4d5c85c215224f4bf04a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c3ad1b88ad694ac3b73ad583abfe6966', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 735.698676] env[62585]: INFO nova.compute.manager [-] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Took 1.02 seconds to deallocate network for instance. 
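[editor's note] The recurring 'Lock "compute_resources" acquired by ... :: waited N.NNNs' and '"released" ... :: held N.NNNs' lines in this section come from oslo.concurrency's synchronized wrapper (the inner function in lockutils.py that the log cites). A minimal example of that pattern follows; the MiniResourceTracker class is illustrative, not Nova's real resource tracker, while lockutils.synchronized is the real oslo.concurrency decorator.

    # Minimal example of the locking pattern that produces the
    # acquired/released timing lines in the log above.
    from oslo_concurrency import lockutils


    class MiniResourceTracker(object):
        def __init__(self):
            self.claims = {}

        @lockutils.synchronized('compute_resources')
        def instance_claim(self, instance_uuid, vcpus, memory_mb):
            # Runs under the named lock; oslo.concurrency logs how long the
            # caller waited for and held it, as seen in the log entries.
            self.claims[instance_uuid] = {'vcpus': vcpus,
                                          'memory_mb': memory_mb}

        @lockutils.synchronized('compute_resources')
        def abort_instance_claim(self, instance_uuid):
            self.claims.pop(instance_uuid, None)
[end editor's note]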
[ 735.700984] env[62585]: DEBUG nova.compute.claims [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 735.701291] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.780141] env[62585]: DEBUG oslo_concurrency.lockutils [req-2ef2bd7d-d0be-444a-b061-6ce20f1c510f req-56e1da9e-5f89-41d8-9651-528496fbdbc2 service nova] Releasing lock "refresh_cache-d536e668-d597-4f8e-8d61-974e072b48c8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.780406] env[62585]: DEBUG nova.compute.manager [req-2ef2bd7d-d0be-444a-b061-6ce20f1c510f req-56e1da9e-5f89-41d8-9651-528496fbdbc2 service nova] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Received event network-vif-deleted-012c7730-176b-4987-8e0f-ba1c9678fb48 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 735.862708] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20719c6-29b0-4ab1-b609-e0dd630d4223 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.870652] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c12fd4a3-9c80-4b2c-80c4-38306fa72ab2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.900351] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89963bee-8c16-48a4-b38b-7a2c2f3d345b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.907488] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb1088d-67cc-49aa-9c28-d393c98d5801 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.920418] env[62585]: DEBUG nova.compute.provider_tree [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.021027] env[62585]: DEBUG nova.network.neutron [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Successfully created port: c55cb775-d8a7-4215-ab7d-75f240422742 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 736.117607] env[62585]: DEBUG nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 
84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 736.423486] env[62585]: DEBUG nova.scheduler.client.report [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 736.456294] env[62585]: DEBUG nova.compute.manager [req-a3828dc1-a16f-416d-aa3a-bb5b0a98e85d req-b81c2a98-e7ae-4b0d-ac2a-eebe228caf83 service nova] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Received event network-vif-deleted-eb8156a4-acac-4527-b0b7-6945e4b585b2 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 736.504042] env[62585]: DEBUG nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 736.504617] env[62585]: DEBUG nova.virt.hardware [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 736.504851] env[62585]: DEBUG nova.virt.hardware [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 736.505025] env[62585]: DEBUG nova.virt.hardware [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 736.505242] env[62585]: DEBUG nova.virt.hardware [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 736.505388] env[62585]: DEBUG nova.virt.hardware [None 
req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 736.505644] env[62585]: DEBUG nova.virt.hardware [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 736.506048] env[62585]: DEBUG nova.virt.hardware [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 736.506214] env[62585]: DEBUG nova.virt.hardware [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 736.506380] env[62585]: DEBUG nova.virt.hardware [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 736.506537] env[62585]: DEBUG nova.virt.hardware [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 736.506701] env[62585]: DEBUG nova.virt.hardware [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 736.507624] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f13e02-6b40-4ea3-bcf8-9ccc92c5919f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.516294] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722525cc-2cd4-499c-bd9c-f5f53f5bc183 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.529970] env[62585]: ERROR nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port eb8156a4-acac-4527-b0b7-6945e4b585b2, please check neutron logs for more information. 
[ 736.529970] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Traceback (most recent call last): [ 736.529970] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 736.529970] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] yield resources [ 736.529970] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 736.529970] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] self.driver.spawn(context, instance, image_meta, [ 736.529970] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 736.529970] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 736.529970] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 736.529970] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] vm_ref = self.build_virtual_machine(instance, [ 736.529970] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 736.530363] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] vif_infos = vmwarevif.get_vif_info(self._session, [ 736.530363] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 736.530363] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] for vif in network_info: [ 736.530363] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 736.530363] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] return self._sync_wrapper(fn, *args, **kwargs) [ 736.530363] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 736.530363] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] self.wait() [ 736.530363] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 736.530363] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] self[:] = self._gt.wait() [ 736.530363] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 736.530363] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] return self._exit_event.wait() [ 736.530363] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 736.530363] env[62585]: ERROR 
nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] current.throw(*self._exc) [ 736.530755] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 736.530755] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] result = function(*args, **kwargs) [ 736.530755] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 736.530755] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] return func(*args, **kwargs) [ 736.530755] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 736.530755] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] raise e [ 736.530755] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 736.530755] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] nwinfo = self.network_api.allocate_for_instance( [ 736.530755] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 736.530755] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] created_port_ids = self._update_ports_for_instance( [ 736.530755] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 736.530755] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] with excutils.save_and_reraise_exception(): [ 736.530755] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.531171] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] self.force_reraise() [ 736.531171] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.531171] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] raise self.value [ 736.531171] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 736.531171] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] updated_port = self._update_port( [ 736.531171] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.531171] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] _ensure_no_port_binding_failure(port) [ 736.531171] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
736.531171] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] raise exception.PortBindingFailed(port_id=port['id']) [ 736.531171] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] nova.exception.PortBindingFailed: Binding failed for port eb8156a4-acac-4527-b0b7-6945e4b585b2, please check neutron logs for more information. [ 736.531171] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] [ 736.531171] env[62585]: INFO nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Terminating instance [ 736.532171] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Acquiring lock "refresh_cache-f03bdd4b-e75e-4d70-84b3-126d2296994f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.532332] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Acquired lock "refresh_cache-f03bdd4b-e75e-4d70-84b3-126d2296994f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.532489] env[62585]: DEBUG nova.network.neutron [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 736.905942] env[62585]: ERROR nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c55cb775-d8a7-4215-ab7d-75f240422742, please check neutron logs for more information. 
[ 736.905942] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 736.905942] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 736.905942] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 736.905942] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 736.905942] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 736.905942] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 736.905942] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 736.905942] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.905942] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 736.905942] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.905942] env[62585]: ERROR nova.compute.manager raise self.value [ 736.905942] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 736.905942] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 736.905942] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.905942] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 736.906489] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 736.906489] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 736.906489] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c55cb775-d8a7-4215-ab7d-75f240422742, please check neutron logs for more information. 
[ 736.906489] env[62585]: ERROR nova.compute.manager [ 736.906489] env[62585]: Traceback (most recent call last): [ 736.906489] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 736.906489] env[62585]: listener.cb(fileno) [ 736.906489] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 736.906489] env[62585]: result = function(*args, **kwargs) [ 736.906489] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 736.906489] env[62585]: return func(*args, **kwargs) [ 736.906489] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 736.906489] env[62585]: raise e [ 736.906489] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 736.906489] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 736.906489] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 736.906489] env[62585]: created_port_ids = self._update_ports_for_instance( [ 736.906489] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 736.906489] env[62585]: with excutils.save_and_reraise_exception(): [ 736.906489] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.906489] env[62585]: self.force_reraise() [ 736.906489] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.906489] env[62585]: raise self.value [ 736.906489] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 736.906489] env[62585]: updated_port = self._update_port( [ 736.906489] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.906489] env[62585]: _ensure_no_port_binding_failure(port) [ 736.906489] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 736.906489] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 736.907442] env[62585]: nova.exception.PortBindingFailed: Binding failed for port c55cb775-d8a7-4215-ab7d-75f240422742, please check neutron logs for more information. [ 736.907442] env[62585]: Removing descriptor: 17 [ 736.928810] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.822s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.929452] env[62585]: ERROR nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1c02db82-9ce5-479c-82d4-3ae4efa61754, please check neutron logs for more information. 
[ 736.929452] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Traceback (most recent call last): [ 736.929452] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 736.929452] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] self.driver.spawn(context, instance, image_meta, [ 736.929452] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 736.929452] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 736.929452] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 736.929452] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] vm_ref = self.build_virtual_machine(instance, [ 736.929452] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 736.929452] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] vif_infos = vmwarevif.get_vif_info(self._session, [ 736.929452] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 736.929803] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] for vif in network_info: [ 736.929803] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 736.929803] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] return self._sync_wrapper(fn, *args, **kwargs) [ 736.929803] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 736.929803] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] self.wait() [ 736.929803] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 736.929803] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] self[:] = self._gt.wait() [ 736.929803] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 736.929803] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] return self._exit_event.wait() [ 736.929803] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 736.929803] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] current.throw(*self._exc) [ 736.929803] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
736.929803] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] result = function(*args, **kwargs) [ 736.930176] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 736.930176] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] return func(*args, **kwargs) [ 736.930176] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 736.930176] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] raise e [ 736.930176] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 736.930176] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] nwinfo = self.network_api.allocate_for_instance( [ 736.930176] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 736.930176] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] created_port_ids = self._update_ports_for_instance( [ 736.930176] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 736.930176] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] with excutils.save_and_reraise_exception(): [ 736.930176] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.930176] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] self.force_reraise() [ 736.930176] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.930528] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] raise self.value [ 736.930528] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 736.930528] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] updated_port = self._update_port( [ 736.930528] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.930528] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] _ensure_no_port_binding_failure(port) [ 736.930528] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 736.930528] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] raise exception.PortBindingFailed(port_id=port['id']) [ 736.930528] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] nova.exception.PortBindingFailed: Binding failed for 
port 1c02db82-9ce5-479c-82d4-3ae4efa61754, please check neutron logs for more information. [ 736.930528] env[62585]: ERROR nova.compute.manager [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] [ 736.930528] env[62585]: DEBUG nova.compute.utils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Binding failed for port 1c02db82-9ce5-479c-82d4-3ae4efa61754, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 736.931398] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.870s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.934627] env[62585]: DEBUG nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Build of instance 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3 was re-scheduled: Binding failed for port 1c02db82-9ce5-479c-82d4-3ae4efa61754, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 736.935069] env[62585]: DEBUG nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 736.935320] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "refresh_cache-20cb5e74-a42c-4c79-aeea-7b8e658bf1d3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.935467] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquired lock "refresh_cache-20cb5e74-a42c-4c79-aeea-7b8e658bf1d3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.935618] env[62585]: DEBUG nova.network.neutron [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 737.050088] env[62585]: DEBUG nova.network.neutron [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.127847] env[62585]: DEBUG nova.network.neutron [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.129726] env[62585]: DEBUG nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 737.155024] env[62585]: DEBUG nova.virt.hardware [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 737.155024] env[62585]: DEBUG nova.virt.hardware [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 737.155024] env[62585]: DEBUG nova.virt.hardware [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.155374] env[62585]: DEBUG nova.virt.hardware [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 737.155603] env[62585]: DEBUG nova.virt.hardware [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.155883] env[62585]: DEBUG nova.virt.hardware [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 737.156238] env[62585]: DEBUG nova.virt.hardware [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 737.157032] env[62585]: DEBUG nova.virt.hardware [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 737.157032] env[62585]: DEBUG nova.virt.hardware [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 737.157032] env[62585]: DEBUG nova.virt.hardware [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 737.157365] env[62585]: DEBUG nova.virt.hardware [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 737.158229] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b0e99a-d84a-4cc3-b00f-ed24dfc7a824 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.165827] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8115487d-38d4-4798-abb3-0d75fabcdd1c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.180058] env[62585]: ERROR nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c55cb775-d8a7-4215-ab7d-75f240422742, please check neutron logs for more information. 
[ 737.180058] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Traceback (most recent call last): [ 737.180058] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 737.180058] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] yield resources [ 737.180058] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 737.180058] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] self.driver.spawn(context, instance, image_meta, [ 737.180058] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 737.180058] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 737.180058] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 737.180058] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] vm_ref = self.build_virtual_machine(instance, [ 737.180058] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 737.180543] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] vif_infos = vmwarevif.get_vif_info(self._session, [ 737.180543] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 737.180543] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] for vif in network_info: [ 737.180543] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 737.180543] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] return self._sync_wrapper(fn, *args, **kwargs) [ 737.180543] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 737.180543] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] self.wait() [ 737.180543] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 737.180543] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] self[:] = self._gt.wait() [ 737.180543] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 737.180543] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] return self._exit_event.wait() [ 737.180543] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 737.180543] env[62585]: ERROR 
nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] current.throw(*self._exc) [ 737.180946] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 737.180946] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] result = function(*args, **kwargs) [ 737.180946] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 737.180946] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] return func(*args, **kwargs) [ 737.180946] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 737.180946] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] raise e [ 737.180946] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 737.180946] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] nwinfo = self.network_api.allocate_for_instance( [ 737.180946] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 737.180946] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] created_port_ids = self._update_ports_for_instance( [ 737.180946] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 737.180946] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] with excutils.save_and_reraise_exception(): [ 737.180946] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 737.181512] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] self.force_reraise() [ 737.181512] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 737.181512] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] raise self.value [ 737.181512] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 737.181512] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] updated_port = self._update_port( [ 737.181512] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 737.181512] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] _ensure_no_port_binding_failure(port) [ 737.181512] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
737.181512] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] raise exception.PortBindingFailed(port_id=port['id']) [ 737.181512] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] nova.exception.PortBindingFailed: Binding failed for port c55cb775-d8a7-4215-ab7d-75f240422742, please check neutron logs for more information. [ 737.181512] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] [ 737.181512] env[62585]: INFO nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Terminating instance [ 737.181872] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Acquiring lock "refresh_cache-84367bf5-0f74-43c5-b49c-e0f4dde5b1d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.181872] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Acquired lock "refresh_cache-84367bf5-0f74-43c5-b49c-e0f4dde5b1d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.182041] env[62585]: DEBUG nova.network.neutron [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 737.460088] env[62585]: DEBUG nova.network.neutron [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.545610] env[62585]: DEBUG nova.network.neutron [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.633010] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Releasing lock "refresh_cache-f03bdd4b-e75e-4d70-84b3-126d2296994f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.633661] env[62585]: DEBUG nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 737.633992] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0312d8f4-019f-45ca-bb6c-1e89382b56a3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.643361] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9fa16c-e194-43dc-8a70-ca27cf4be244 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.666603] env[62585]: WARNING nova.virt.vmwareapi.driver [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance f03bdd4b-e75e-4d70-84b3-126d2296994f could not be found. [ 737.666816] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 737.669054] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7845fd94-1d4e-4182-8f23-ab47e6bf4244 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.677117] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041ce20e-7e20-4114-be92-a8d8aaefcdb0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.701700] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f03bdd4b-e75e-4d70-84b3-126d2296994f could not be found. [ 737.701919] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 737.702110] env[62585]: INFO nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Took 0.07 seconds to destroy the instance on the hypervisor. [ 737.702350] env[62585]: DEBUG oslo.service.loopingcall [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 737.704501] env[62585]: DEBUG nova.compute.manager [-] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 737.704602] env[62585]: DEBUG nova.network.neutron [-] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 737.706395] env[62585]: DEBUG nova.network.neutron [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.718133] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b66947-fef4-4e48-b51b-0cfe3a8a96bd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.721850] env[62585]: DEBUG nova.network.neutron [-] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.728040] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dacbc995-4482-4271-bfe5-858744ec2054 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.757219] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b9f379-89f4-439d-a4f4-6edf680b8209 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.764609] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a56f99-1363-4a4d-a04e-bab74dab1aac {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.779174] env[62585]: DEBUG nova.compute.provider_tree [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.780974] env[62585]: DEBUG nova.network.neutron [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.047908] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Releasing lock "refresh_cache-20cb5e74-a42c-4c79-aeea-7b8e658bf1d3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.048177] env[62585]: DEBUG nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 
tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 738.048365] env[62585]: DEBUG nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 738.048525] env[62585]: DEBUG nova.network.neutron [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 738.063628] env[62585]: DEBUG nova.network.neutron [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 738.227345] env[62585]: DEBUG nova.network.neutron [-] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.283199] env[62585]: DEBUG nova.scheduler.client.report [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 738.286458] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Releasing lock "refresh_cache-84367bf5-0f74-43c5-b49c-e0f4dde5b1d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.286824] env[62585]: DEBUG nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 738.287020] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 738.287520] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3d7c2f31-6be6-42e7-85f8-f8332b88a170 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.295989] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96162f34-4032-43d5-811e-16c80a8ff802 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.317669] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0 could not be found. [ 738.317868] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 738.318053] env[62585]: INFO nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Took 0.03 seconds to destroy the instance on the hypervisor. [ 738.318341] env[62585]: DEBUG oslo.service.loopingcall [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 738.318500] env[62585]: DEBUG nova.compute.manager [-] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 738.318610] env[62585]: DEBUG nova.network.neutron [-] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 738.334707] env[62585]: DEBUG nova.network.neutron [-] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 738.482333] env[62585]: DEBUG nova.compute.manager [req-2923f5f9-f7c9-4acb-82bc-43cade1c82f4 req-4cdd7d06-f181-4515-92b2-853d4b14a920 service nova] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Received event network-changed-c55cb775-d8a7-4215-ab7d-75f240422742 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 738.482495] env[62585]: DEBUG nova.compute.manager [req-2923f5f9-f7c9-4acb-82bc-43cade1c82f4 req-4cdd7d06-f181-4515-92b2-853d4b14a920 service nova] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Refreshing instance network info cache due to event network-changed-c55cb775-d8a7-4215-ab7d-75f240422742. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 738.482711] env[62585]: DEBUG oslo_concurrency.lockutils [req-2923f5f9-f7c9-4acb-82bc-43cade1c82f4 req-4cdd7d06-f181-4515-92b2-853d4b14a920 service nova] Acquiring lock "refresh_cache-84367bf5-0f74-43c5-b49c-e0f4dde5b1d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.482852] env[62585]: DEBUG oslo_concurrency.lockutils [req-2923f5f9-f7c9-4acb-82bc-43cade1c82f4 req-4cdd7d06-f181-4515-92b2-853d4b14a920 service nova] Acquired lock "refresh_cache-84367bf5-0f74-43c5-b49c-e0f4dde5b1d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.483012] env[62585]: DEBUG nova.network.neutron [req-2923f5f9-f7c9-4acb-82bc-43cade1c82f4 req-4cdd7d06-f181-4515-92b2-853d4b14a920 service nova] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Refreshing network info cache for port c55cb775-d8a7-4215-ab7d-75f240422742 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 738.565908] env[62585]: DEBUG nova.network.neutron [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.732881] env[62585]: INFO nova.compute.manager [-] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Took 1.03 seconds to deallocate network for instance. [ 738.788683] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.857s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.790078] env[62585]: ERROR nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b03b948a-5106-4776-aa7a-bcb2b5bae2d4, please check neutron logs for more information. 
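Alongside the failure above, the req-2923f5f9 records show Neutron's network-changed event for the same port being handled by taking the per-instance "refresh_cache-<uuid>" lock before rebuilding the instance network info cache, so event handlers and the spawn/destroy path do not race on the cache. A minimal sketch of that serialization pattern — the handler and its refresh_cache callable are hypothetical; only the lock-name convention comes from the log:

    from oslo_concurrency import lockutils

    def handle_network_changed(instance_uuid, refresh_cache):
        # refresh_cache is a hypothetical callable standing in for the code
        # that re-queries Neutron and rewrites the cached network_info; on a
        # binding failure the refreshed cache simply comes back empty, which
        # is why the log shows "Updating instance_info_cache with
        # network_info: []".
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            refresh_cache(instance_uuid)

    handle_network_changed('84367bf5-0f74-43c5-b49c-e0f4dde5b1d0',
                           lambda uuid: print('refreshed cache for', uuid))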
[ 738.790078] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Traceback (most recent call last): [ 738.790078] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 738.790078] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] self.driver.spawn(context, instance, image_meta, [ 738.790078] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 738.790078] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 738.790078] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 738.790078] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] vm_ref = self.build_virtual_machine(instance, [ 738.790078] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 738.790078] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] vif_infos = vmwarevif.get_vif_info(self._session, [ 738.790078] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 738.790487] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] for vif in network_info: [ 738.790487] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 738.790487] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] return self._sync_wrapper(fn, *args, **kwargs) [ 738.790487] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 738.790487] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] self.wait() [ 738.790487] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 738.790487] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] self[:] = self._gt.wait() [ 738.790487] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 738.790487] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] return self._exit_event.wait() [ 738.790487] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 738.790487] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] current.throw(*self._exc) [ 738.790487] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
738.790487] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] result = function(*args, **kwargs) [ 738.790922] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 738.790922] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] return func(*args, **kwargs) [ 738.790922] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 738.790922] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] raise e [ 738.790922] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 738.790922] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] nwinfo = self.network_api.allocate_for_instance( [ 738.790922] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 738.790922] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] created_port_ids = self._update_ports_for_instance( [ 738.790922] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 738.790922] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] with excutils.save_and_reraise_exception(): [ 738.790922] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.790922] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] self.force_reraise() [ 738.790922] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.792241] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] raise self.value [ 738.792241] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 738.792241] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] updated_port = self._update_port( [ 738.792241] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.792241] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] _ensure_no_port_binding_failure(port) [ 738.792241] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 738.792241] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] raise exception.PortBindingFailed(port_id=port['id']) [ 738.792241] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] nova.exception.PortBindingFailed: Binding failed for 
port b03b948a-5106-4776-aa7a-bcb2b5bae2d4, please check neutron logs for more information. [ 738.792241] env[62585]: ERROR nova.compute.manager [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] [ 738.792241] env[62585]: DEBUG nova.compute.utils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Binding failed for port b03b948a-5106-4776-aa7a-bcb2b5bae2d4, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 738.792637] env[62585]: DEBUG oslo_concurrency.lockutils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.464s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.792733] env[62585]: INFO nova.compute.claims [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 738.795326] env[62585]: DEBUG nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Build of instance 2fccf900-e294-4d66-93c5-d1c7570c5d7e was re-scheduled: Binding failed for port b03b948a-5106-4776-aa7a-bcb2b5bae2d4, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 738.795815] env[62585]: DEBUG nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 738.795955] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Acquiring lock "refresh_cache-2fccf900-e294-4d66-93c5-d1c7570c5d7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.796982] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Acquired lock "refresh_cache-2fccf900-e294-4d66-93c5-d1c7570c5d7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.796982] env[62585]: DEBUG nova.network.neutron [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 738.836928] env[62585]: DEBUG nova.network.neutron [-] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.004728] env[62585]: DEBUG nova.network.neutron [req-2923f5f9-f7c9-4acb-82bc-43cade1c82f4 req-4cdd7d06-f181-4515-92b2-853d4b14a920 service nova] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.049545] env[62585]: DEBUG nova.network.neutron [req-2923f5f9-f7c9-4acb-82bc-43cade1c82f4 req-4cdd7d06-f181-4515-92b2-853d4b14a920 service nova] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.068470] env[62585]: INFO nova.compute.manager [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3] Took 1.02 seconds to deallocate network for instance. [ 739.289179] env[62585]: INFO nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Took 0.56 seconds to detach 1 volumes for instance. 
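The "compute_resources" records on either side of this point show the resource tracker serializing instance_claim and abort_instance_claim on a single named lock, with lockutils logging how long each caller waited for and held it (e.g. "waited 14.464s", "held 1.857s"). A toy sketch of that pattern, using the inventory figures reported above for the capacity arithmetic (capacity = (total - reserved) * allocation_ratio, so 48 VCPUs at a 4.0 ratio give 192 schedulable vCPUs); the tracker class and its bookkeeping are hypothetical, only the lock name and the arithmetic come from the log:

    from oslo_concurrency import lockutils

    class ToyResourceTracker:
        """Hypothetical stand-in for the resource tracker seen in the log."""

        def __init__(self, total_vcpus=48, reserved=0, allocation_ratio=4.0):
            self.capacity = int((total_vcpus - reserved) * allocation_ratio)  # 192
            self.used = 0

        @lockutils.synchronized('compute_resources')
        def instance_claim(self, vcpus):
            # All claims and aborts funnel through one named lock, which is
            # why the log reports per-caller waited/held times on it.
            if self.used + vcpus > self.capacity:
                raise RuntimeError('insufficient vCPUs')
            self.used += vcpus

        @lockutils.synchronized('compute_resources')
        def abort_instance_claim(self, vcpus):
            self.used -= vcpus

    tracker = ToyResourceTracker()
    tracker.instance_claim(1)        # e.g. the 1-vCPU m1.nano claim above
    tracker.abort_instance_claim(1)  # rolled back after the build failure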
[ 739.291364] env[62585]: DEBUG nova.compute.claims [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 739.291542] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.314387] env[62585]: DEBUG nova.network.neutron [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.338968] env[62585]: INFO nova.compute.manager [-] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Took 1.02 seconds to deallocate network for instance. [ 739.340973] env[62585]: DEBUG nova.compute.claims [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 739.341163] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.379967] env[62585]: DEBUG nova.network.neutron [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.552868] env[62585]: DEBUG oslo_concurrency.lockutils [req-2923f5f9-f7c9-4acb-82bc-43cade1c82f4 req-4cdd7d06-f181-4515-92b2-853d4b14a920 service nova] Releasing lock "refresh_cache-84367bf5-0f74-43c5-b49c-e0f4dde5b1d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.552868] env[62585]: DEBUG nova.compute.manager [req-2923f5f9-f7c9-4acb-82bc-43cade1c82f4 req-4cdd7d06-f181-4515-92b2-853d4b14a920 service nova] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Received event network-vif-deleted-c55cb775-d8a7-4215-ab7d-75f240422742 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 739.882510] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Releasing lock "refresh_cache-2fccf900-e294-4d66-93c5-d1c7570c5d7e" {{(pid=62585) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.882746] env[62585]: DEBUG nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 739.882923] env[62585]: DEBUG nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 739.883098] env[62585]: DEBUG nova.network.neutron [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 739.898033] env[62585]: DEBUG nova.network.neutron [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.046483] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e758fcc9-8a55-4e54-ad66-fd7016f1bea3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.053970] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7a354f-f733-4863-8120-ee06ea18572e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.092708] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d5cf76-c648-4432-974b-94f20bf5b780 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.099687] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e687a9-b02e-4ffc-950e-c6cb9af612d2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.112648] env[62585]: DEBUG nova.compute.provider_tree [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.115093] env[62585]: INFO nova.scheduler.client.report [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Deleted allocations for instance 20cb5e74-a42c-4c79-aeea-7b8e658bf1d3 [ 740.400142] env[62585]: DEBUG nova.network.neutron [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 
tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.621362] env[62585]: DEBUG nova.scheduler.client.report [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 740.629018] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59ea350f-de2b-4f5e-8708-3e0050a590c5 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "20cb5e74-a42c-4c79-aeea-7b8e658bf1d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.388s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.902825] env[62585]: INFO nova.compute.manager [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] [instance: 2fccf900-e294-4d66-93c5-d1c7570c5d7e] Took 1.02 seconds to deallocate network for instance. [ 741.127103] env[62585]: DEBUG oslo_concurrency.lockutils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.336s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.127664] env[62585]: DEBUG nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 741.130250] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 741.132741] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.878s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.133998] env[62585]: INFO nova.compute.claims [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 741.638135] env[62585]: DEBUG nova.compute.utils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 741.644548] env[62585]: DEBUG nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 741.644719] env[62585]: DEBUG nova.network.neutron [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 741.665138] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.706600] env[62585]: DEBUG nova.policy [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28834cc42f8a49cebca5647badabf8ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c49ab537d42244f495aaa3cbdaafc6b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 741.940403] env[62585]: INFO nova.scheduler.client.report [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Deleted allocations for instance 2fccf900-e294-4d66-93c5-d1c7570c5d7e [ 742.036248] env[62585]: DEBUG nova.network.neutron [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] 
[instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Successfully created port: 914d5549-a112-41c2-9c00-2fd475dd5265 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 742.147884] env[62585]: DEBUG nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 742.423959] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1e2ff4-ee3c-41e3-934a-d6c0004945ec {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.431337] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54ce8f2-4fcb-471b-a6ce-9ded0c6f1e13 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.461438] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b0dbae2e-2c37-46b9-8512-4f470dbae8d3 tempest-InstanceActionsNegativeTestJSON-921022491 tempest-InstanceActionsNegativeTestJSON-921022491-project-member] Lock "2fccf900-e294-4d66-93c5-d1c7570c5d7e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.594s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.463554] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7396401e-e90d-4a35-9abe-109a8a4cb62a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.473504] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a3a33d-ebdc-47d5-b992-301fd5880a7a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.485322] env[62585]: DEBUG nova.compute.provider_tree [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.499517] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "6483148a-b53d-46b9-8926-07b628f2ea3b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.499750] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "6483148a-b53d-46b9-8926-07b628f2ea3b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.969717] env[62585]: DEBUG 
nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 742.988152] env[62585]: DEBUG nova.scheduler.client.report [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 743.020635] env[62585]: DEBUG nova.compute.manager [req-087608da-867b-4551-aa68-64a8af5e6fa0 req-f79d9734-ffdd-4367-a901-5329d263c23b service nova] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Received event network-changed-914d5549-a112-41c2-9c00-2fd475dd5265 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 743.020788] env[62585]: DEBUG nova.compute.manager [req-087608da-867b-4551-aa68-64a8af5e6fa0 req-f79d9734-ffdd-4367-a901-5329d263c23b service nova] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Refreshing instance network info cache due to event network-changed-914d5549-a112-41c2-9c00-2fd475dd5265. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 743.021030] env[62585]: DEBUG oslo_concurrency.lockutils [req-087608da-867b-4551-aa68-64a8af5e6fa0 req-f79d9734-ffdd-4367-a901-5329d263c23b service nova] Acquiring lock "refresh_cache-0582ee18-80b2-48d0-9d8d-e82d1b9d60c5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.021155] env[62585]: DEBUG oslo_concurrency.lockutils [req-087608da-867b-4551-aa68-64a8af5e6fa0 req-f79d9734-ffdd-4367-a901-5329d263c23b service nova] Acquired lock "refresh_cache-0582ee18-80b2-48d0-9d8d-e82d1b9d60c5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.021312] env[62585]: DEBUG nova.network.neutron [req-087608da-867b-4551-aa68-64a8af5e6fa0 req-f79d9734-ffdd-4367-a901-5329d263c23b service nova] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Refreshing network info cache for port 914d5549-a112-41c2-9c00-2fd475dd5265 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 743.168469] env[62585]: DEBUG nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 743.201983] env[62585]: DEBUG nova.virt.hardware [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 743.202763] env[62585]: DEBUG nova.virt.hardware [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 743.202944] env[62585]: DEBUG nova.virt.hardware [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 743.203335] env[62585]: DEBUG nova.virt.hardware [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 743.204323] env[62585]: DEBUG nova.virt.hardware [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 743.204517] env[62585]: DEBUG nova.virt.hardware [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 743.204732] env[62585]: DEBUG nova.virt.hardware [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 743.204891] env[62585]: DEBUG nova.virt.hardware [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 743.205106] env[62585]: DEBUG nova.virt.hardware [None 
req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 743.205321] env[62585]: DEBUG nova.virt.hardware [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 743.205496] env[62585]: DEBUG nova.virt.hardware [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 743.206364] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42cd2494-9db2-4bbd-82d0-e78a98024513 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.214286] env[62585]: ERROR nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 914d5549-a112-41c2-9c00-2fd475dd5265, please check neutron logs for more information. [ 743.214286] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 743.214286] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 743.214286] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 743.214286] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 743.214286] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 743.214286] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 743.214286] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 743.214286] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 743.214286] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 743.214286] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 743.214286] env[62585]: ERROR nova.compute.manager raise self.value [ 743.214286] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 743.214286] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 743.214286] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 743.214286] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 743.214894] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 743.214894] env[62585]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 743.214894] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 914d5549-a112-41c2-9c00-2fd475dd5265, please check neutron logs for more information. [ 743.214894] env[62585]: ERROR nova.compute.manager [ 743.214894] env[62585]: Traceback (most recent call last): [ 743.214894] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 743.214894] env[62585]: listener.cb(fileno) [ 743.214894] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 743.214894] env[62585]: result = function(*args, **kwargs) [ 743.214894] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 743.214894] env[62585]: return func(*args, **kwargs) [ 743.214894] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 743.214894] env[62585]: raise e [ 743.214894] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 743.214894] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 743.214894] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 743.214894] env[62585]: created_port_ids = self._update_ports_for_instance( [ 743.214894] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 743.214894] env[62585]: with excutils.save_and_reraise_exception(): [ 743.214894] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 743.214894] env[62585]: self.force_reraise() [ 743.214894] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 743.214894] env[62585]: raise self.value [ 743.214894] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 743.214894] env[62585]: updated_port = self._update_port( [ 743.214894] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 743.214894] env[62585]: _ensure_no_port_binding_failure(port) [ 743.214894] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 743.214894] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 743.215938] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 914d5549-a112-41c2-9c00-2fd475dd5265, please check neutron logs for more information. [ 743.215938] env[62585]: Removing descriptor: 17 [ 743.220328] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617709b2-6f40-4a9a-9cc9-d57a7133d211 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.238586] env[62585]: ERROR nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 914d5549-a112-41c2-9c00-2fd475dd5265, please check neutron logs for more information. 
[ 743.238586] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Traceback (most recent call last): [ 743.238586] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 743.238586] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] yield resources [ 743.238586] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 743.238586] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] self.driver.spawn(context, instance, image_meta, [ 743.238586] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 743.238586] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 743.238586] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 743.238586] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] vm_ref = self.build_virtual_machine(instance, [ 743.238586] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 743.239061] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] vif_infos = vmwarevif.get_vif_info(self._session, [ 743.239061] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 743.239061] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] for vif in network_info: [ 743.239061] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 743.239061] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] return self._sync_wrapper(fn, *args, **kwargs) [ 743.239061] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 743.239061] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] self.wait() [ 743.239061] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 743.239061] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] self[:] = self._gt.wait() [ 743.239061] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 743.239061] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] return self._exit_event.wait() [ 743.239061] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 743.239061] env[62585]: ERROR 
nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] current.throw(*self._exc) [ 743.239484] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 743.239484] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] result = function(*args, **kwargs) [ 743.239484] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 743.239484] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] return func(*args, **kwargs) [ 743.239484] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 743.239484] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] raise e [ 743.239484] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 743.239484] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] nwinfo = self.network_api.allocate_for_instance( [ 743.239484] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 743.239484] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] created_port_ids = self._update_ports_for_instance( [ 743.239484] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 743.239484] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] with excutils.save_and_reraise_exception(): [ 743.239484] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 743.239903] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] self.force_reraise() [ 743.239903] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 743.239903] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] raise self.value [ 743.239903] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 743.239903] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] updated_port = self._update_port( [ 743.239903] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 743.239903] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] _ensure_no_port_binding_failure(port) [ 743.239903] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
743.239903] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] raise exception.PortBindingFailed(port_id=port['id']) [ 743.239903] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] nova.exception.PortBindingFailed: Binding failed for port 914d5549-a112-41c2-9c00-2fd475dd5265, please check neutron logs for more information. [ 743.239903] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] [ 743.239903] env[62585]: INFO nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Terminating instance [ 743.241329] env[62585]: DEBUG oslo_concurrency.lockutils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "refresh_cache-0582ee18-80b2-48d0-9d8d-e82d1b9d60c5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.490133] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.495116] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.362s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.495545] env[62585]: DEBUG nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 743.498017] env[62585]: DEBUG oslo_concurrency.lockutils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.524s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.546732] env[62585]: DEBUG nova.network.neutron [req-087608da-867b-4551-aa68-64a8af5e6fa0 req-f79d9734-ffdd-4367-a901-5329d263c23b service nova] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.618445] env[62585]: DEBUG nova.network.neutron [req-087608da-867b-4551-aa68-64a8af5e6fa0 req-f79d9734-ffdd-4367-a901-5329d263c23b service nova] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.002594] env[62585]: DEBUG nova.compute.utils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 744.009321] env[62585]: DEBUG nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 744.009515] env[62585]: DEBUG nova.network.neutron [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 744.050689] env[62585]: DEBUG nova.policy [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a84e734694a04c2384e9c0a6e84c3c3c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12e1b64dd2034d459da3199dd74cb62a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 744.121500] env[62585]: DEBUG oslo_concurrency.lockutils [req-087608da-867b-4551-aa68-64a8af5e6fa0 req-f79d9734-ffdd-4367-a901-5329d263c23b service nova] Releasing lock "refresh_cache-0582ee18-80b2-48d0-9d8d-e82d1b9d60c5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.122114] env[62585]: DEBUG oslo_concurrency.lockutils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "refresh_cache-0582ee18-80b2-48d0-9d8d-e82d1b9d60c5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.122947] env[62585]: DEBUG nova.network.neutron [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 744.287087] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2099fbe6-d93d-44c8-b4f6-1062d4a356fb {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.294502] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c087e15-cc36-4a29-bbf0-bc1291dc4195 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.325838] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138f0fab-fd31-4adb-af61-f059865f5c0b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.334666] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223b0d49-c679-4912-b322-386117cfed3a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.348457] env[62585]: DEBUG nova.compute.provider_tree [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.371772] env[62585]: DEBUG nova.network.neutron [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Successfully created port: db88c093-4462-4886-8542-eb8f0dd49f12 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 744.512699] env[62585]: DEBUG nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 744.650826] env[62585]: DEBUG nova.network.neutron [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.724517] env[62585]: DEBUG nova.network.neutron [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.851990] env[62585]: DEBUG nova.scheduler.client.report [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 745.049195] env[62585]: DEBUG nova.compute.manager [req-03ecf56f-e304-45cd-988b-f1d9cb74b336 req-2836dfb1-404b-407e-8c19-b20f4af0b5ae service nova] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Received event network-vif-deleted-914d5549-a112-41c2-9c00-2fd475dd5265 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 745.228560] env[62585]: DEBUG oslo_concurrency.lockutils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "refresh_cache-0582ee18-80b2-48d0-9d8d-e82d1b9d60c5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.229022] env[62585]: DEBUG nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 745.229236] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 745.229557] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9303c55b-d5c6-45ef-846b-d225ef2f1d5a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.244107] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43518ca3-f2f6-46f2-b1a2-6e751d9eacdc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.267402] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5 could not be found. [ 745.267652] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 745.267832] env[62585]: INFO nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 745.268095] env[62585]: DEBUG oslo.service.loopingcall [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 745.268341] env[62585]: DEBUG nova.compute.manager [-] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 745.268438] env[62585]: DEBUG nova.network.neutron [-] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 745.286755] env[62585]: DEBUG nova.network.neutron [-] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 745.316644] env[62585]: ERROR nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port db88c093-4462-4886-8542-eb8f0dd49f12, please check neutron logs for more information. 
[ 745.316644] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 745.316644] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.316644] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 745.316644] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 745.316644] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 745.316644] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 745.316644] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 745.316644] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.316644] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 745.316644] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.316644] env[62585]: ERROR nova.compute.manager raise self.value [ 745.316644] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 745.316644] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 745.316644] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.316644] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 745.317199] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 745.317199] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 745.317199] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port db88c093-4462-4886-8542-eb8f0dd49f12, please check neutron logs for more information. 
[ 745.317199] env[62585]: ERROR nova.compute.manager [ 745.317199] env[62585]: Traceback (most recent call last): [ 745.317199] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 745.317199] env[62585]: listener.cb(fileno) [ 745.317199] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 745.317199] env[62585]: result = function(*args, **kwargs) [ 745.317199] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 745.317199] env[62585]: return func(*args, **kwargs) [ 745.317199] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 745.317199] env[62585]: raise e [ 745.317199] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.317199] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 745.317199] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 745.317199] env[62585]: created_port_ids = self._update_ports_for_instance( [ 745.317199] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 745.317199] env[62585]: with excutils.save_and_reraise_exception(): [ 745.317199] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.317199] env[62585]: self.force_reraise() [ 745.317199] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.317199] env[62585]: raise self.value [ 745.317199] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 745.317199] env[62585]: updated_port = self._update_port( [ 745.317199] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.317199] env[62585]: _ensure_no_port_binding_failure(port) [ 745.317199] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 745.317199] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 745.318162] env[62585]: nova.exception.PortBindingFailed: Binding failed for port db88c093-4462-4886-8542-eb8f0dd49f12, please check neutron logs for more information. [ 745.318162] env[62585]: Removing descriptor: 17 [ 745.358205] env[62585]: DEBUG oslo_concurrency.lockutils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.860s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.358993] env[62585]: ERROR nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5e91260d-350d-4dba-b3e1-d8e7abfad0a5, please check neutron logs for more information. 
[ 745.358993] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Traceback (most recent call last): [ 745.358993] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 745.358993] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] self.driver.spawn(context, instance, image_meta, [ 745.358993] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 745.358993] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] self._vmops.spawn(context, instance, image_meta, injected_files, [ 745.358993] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 745.358993] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] vm_ref = self.build_virtual_machine(instance, [ 745.358993] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 745.358993] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] vif_infos = vmwarevif.get_vif_info(self._session, [ 745.358993] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 745.359673] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] for vif in network_info: [ 745.359673] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 745.359673] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] return self._sync_wrapper(fn, *args, **kwargs) [ 745.359673] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 745.359673] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] self.wait() [ 745.359673] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 745.359673] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] self[:] = self._gt.wait() [ 745.359673] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 745.359673] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] return self._exit_event.wait() [ 745.359673] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 745.359673] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] current.throw(*self._exc) [ 745.359673] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
745.359673] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] result = function(*args, **kwargs) [ 745.360350] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 745.360350] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] return func(*args, **kwargs) [ 745.360350] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 745.360350] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] raise e [ 745.360350] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.360350] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] nwinfo = self.network_api.allocate_for_instance( [ 745.360350] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 745.360350] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] created_port_ids = self._update_ports_for_instance( [ 745.360350] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 745.360350] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] with excutils.save_and_reraise_exception(): [ 745.360350] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.360350] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] self.force_reraise() [ 745.360350] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.361257] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] raise self.value [ 745.361257] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 745.361257] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] updated_port = self._update_port( [ 745.361257] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.361257] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] _ensure_no_port_binding_failure(port) [ 745.361257] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 745.361257] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] raise exception.PortBindingFailed(port_id=port['id']) [ 745.361257] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] nova.exception.PortBindingFailed: Binding failed for 
port 5e91260d-350d-4dba-b3e1-d8e7abfad0a5, please check neutron logs for more information. [ 745.361257] env[62585]: ERROR nova.compute.manager [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] [ 745.361257] env[62585]: DEBUG nova.compute.utils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Binding failed for port 5e91260d-350d-4dba-b3e1-d8e7abfad0a5, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 745.361933] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.900s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.362649] env[62585]: INFO nova.compute.claims [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 745.365723] env[62585]: DEBUG nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Build of instance 7504c221-2d27-4dc6-9100-9a2dca2a6036 was re-scheduled: Binding failed for port 5e91260d-350d-4dba-b3e1-d8e7abfad0a5, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 745.366237] env[62585]: DEBUG nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 745.366474] env[62585]: DEBUG oslo_concurrency.lockutils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "refresh_cache-7504c221-2d27-4dc6-9100-9a2dca2a6036" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.366620] env[62585]: DEBUG oslo_concurrency.lockutils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock "refresh_cache-7504c221-2d27-4dc6-9100-9a2dca2a6036" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.366776] env[62585]: DEBUG nova.network.neutron [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.523597] env[62585]: DEBUG nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 745.548110] env[62585]: DEBUG nova.virt.hardware [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 745.548390] env[62585]: DEBUG nova.virt.hardware [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 745.548548] env[62585]: DEBUG nova.virt.hardware [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.548728] env[62585]: DEBUG nova.virt.hardware [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 745.548871] env[62585]: DEBUG nova.virt.hardware [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 745.549028] env[62585]: DEBUG nova.virt.hardware [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 745.549234] env[62585]: DEBUG nova.virt.hardware [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 745.549392] env[62585]: DEBUG nova.virt.hardware [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 745.549551] env[62585]: DEBUG nova.virt.hardware [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 745.549705] env[62585]: DEBUG nova.virt.hardware [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 745.549871] env[62585]: DEBUG nova.virt.hardware [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 745.550742] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8854fdea-ce0a-46c4-8682-49e31aac54b3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.558660] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1a686a-13af-452c-af47-b86ab0bb9722 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.571861] env[62585]: ERROR nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port db88c093-4462-4886-8542-eb8f0dd49f12, please check neutron logs for more information. 
[ 745.571861] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Traceback (most recent call last): [ 745.571861] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 745.571861] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] yield resources [ 745.571861] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 745.571861] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] self.driver.spawn(context, instance, image_meta, [ 745.571861] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 745.571861] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 745.571861] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 745.571861] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] vm_ref = self.build_virtual_machine(instance, [ 745.571861] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 745.572455] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] vif_infos = vmwarevif.get_vif_info(self._session, [ 745.572455] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 745.572455] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] for vif in network_info: [ 745.572455] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 745.572455] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] return self._sync_wrapper(fn, *args, **kwargs) [ 745.572455] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 745.572455] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] self.wait() [ 745.572455] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 745.572455] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] self[:] = self._gt.wait() [ 745.572455] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 745.572455] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] return self._exit_event.wait() [ 745.572455] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 745.572455] env[62585]: ERROR 
nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] current.throw(*self._exc) [ 745.573024] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 745.573024] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] result = function(*args, **kwargs) [ 745.573024] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 745.573024] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] return func(*args, **kwargs) [ 745.573024] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 745.573024] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] raise e [ 745.573024] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.573024] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] nwinfo = self.network_api.allocate_for_instance( [ 745.573024] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 745.573024] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] created_port_ids = self._update_ports_for_instance( [ 745.573024] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 745.573024] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] with excutils.save_and_reraise_exception(): [ 745.573024] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.573462] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] self.force_reraise() [ 745.573462] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.573462] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] raise self.value [ 745.573462] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 745.573462] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] updated_port = self._update_port( [ 745.573462] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.573462] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] _ensure_no_port_binding_failure(port) [ 745.573462] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
745.573462] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] raise exception.PortBindingFailed(port_id=port['id']) [ 745.573462] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] nova.exception.PortBindingFailed: Binding failed for port db88c093-4462-4886-8542-eb8f0dd49f12, please check neutron logs for more information. [ 745.573462] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] [ 745.573462] env[62585]: INFO nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Terminating instance [ 745.574636] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Acquiring lock "refresh_cache-b1587330-1740-4bfd-a0c3-a25794c3ccd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.574797] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Acquired lock "refresh_cache-b1587330-1740-4bfd-a0c3-a25794c3ccd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.574957] env[62585]: DEBUG nova.network.neutron [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.790463] env[62585]: DEBUG nova.network.neutron [-] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.887146] env[62585]: DEBUG nova.network.neutron [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 745.975184] env[62585]: DEBUG nova.network.neutron [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.091036] env[62585]: DEBUG nova.network.neutron [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.160410] env[62585]: DEBUG nova.network.neutron [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.294172] env[62585]: INFO nova.compute.manager [-] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Took 1.03 seconds to deallocate network for instance. [ 746.296554] env[62585]: DEBUG nova.compute.claims [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 746.296728] env[62585]: DEBUG oslo_concurrency.lockutils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.478171] env[62585]: DEBUG oslo_concurrency.lockutils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "refresh_cache-7504c221-2d27-4dc6-9100-9a2dca2a6036" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.478461] env[62585]: DEBUG nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 746.478646] env[62585]: DEBUG nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 746.478814] env[62585]: DEBUG nova.network.neutron [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 746.494933] env[62585]: DEBUG nova.network.neutron [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.586817] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bde425-08c5-4e84-8c3d-c43f807c5cd7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.594453] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125665af-a354-414d-b668-06ac67317ab9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.623760] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c867dbb-0904-48fb-9f46-fa6d7ac895e9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.630476] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5509add4-191d-4a12-8a3a-e6550c6b6096 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.643638] env[62585]: DEBUG nova.compute.provider_tree [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.663717] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Releasing lock "refresh_cache-b1587330-1740-4bfd-a0c3-a25794c3ccd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.664151] env[62585]: DEBUG nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 746.664342] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 746.664612] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9325d82d-410d-4be2-a2b5-c3bdfede2f8a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.674105] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab5d991-9340-4387-a921-8ba9dabe6994 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.696652] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b1587330-1740-4bfd-a0c3-a25794c3ccd3 could not be found. [ 746.696782] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 746.696959] env[62585]: INFO nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Took 0.03 seconds to destroy the instance on the hypervisor. [ 746.697218] env[62585]: DEBUG oslo.service.loopingcall [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 746.698028] env[62585]: DEBUG nova.compute.manager [-] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 746.698028] env[62585]: DEBUG nova.network.neutron [-] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 746.711688] env[62585]: DEBUG nova.network.neutron [-] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.997971] env[62585]: DEBUG nova.network.neutron [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.071524] env[62585]: DEBUG nova.compute.manager [req-f8d5816b-1da3-46d0-aaf1-ed7528d567dd req-43d1d688-7069-44b6-9be1-544a6d247ec6 service nova] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Received event network-changed-db88c093-4462-4886-8542-eb8f0dd49f12 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 747.071723] env[62585]: DEBUG nova.compute.manager [req-f8d5816b-1da3-46d0-aaf1-ed7528d567dd req-43d1d688-7069-44b6-9be1-544a6d247ec6 service nova] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Refreshing instance network info cache due to event network-changed-db88c093-4462-4886-8542-eb8f0dd49f12. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 747.071921] env[62585]: DEBUG oslo_concurrency.lockutils [req-f8d5816b-1da3-46d0-aaf1-ed7528d567dd req-43d1d688-7069-44b6-9be1-544a6d247ec6 service nova] Acquiring lock "refresh_cache-b1587330-1740-4bfd-a0c3-a25794c3ccd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.072070] env[62585]: DEBUG oslo_concurrency.lockutils [req-f8d5816b-1da3-46d0-aaf1-ed7528d567dd req-43d1d688-7069-44b6-9be1-544a6d247ec6 service nova] Acquired lock "refresh_cache-b1587330-1740-4bfd-a0c3-a25794c3ccd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.072226] env[62585]: DEBUG nova.network.neutron [req-f8d5816b-1da3-46d0-aaf1-ed7528d567dd req-43d1d688-7069-44b6-9be1-544a6d247ec6 service nova] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Refreshing network info cache for port db88c093-4462-4886-8542-eb8f0dd49f12 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 747.149391] env[62585]: DEBUG nova.scheduler.client.report [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 747.213791] env[62585]: DEBUG nova.network.neutron [-] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.501269] env[62585]: INFO nova.compute.manager [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 7504c221-2d27-4dc6-9100-9a2dca2a6036] Took 1.02 seconds to deallocate network for 
instance. [ 747.590041] env[62585]: DEBUG nova.network.neutron [req-f8d5816b-1da3-46d0-aaf1-ed7528d567dd req-43d1d688-7069-44b6-9be1-544a6d247ec6 service nova] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 747.654358] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.293s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.654882] env[62585]: DEBUG nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 747.658219] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.586s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.672517] env[62585]: DEBUG nova.network.neutron [req-f8d5816b-1da3-46d0-aaf1-ed7528d567dd req-43d1d688-7069-44b6-9be1-544a6d247ec6 service nova] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.716770] env[62585]: INFO nova.compute.manager [-] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Took 1.02 seconds to deallocate network for instance. [ 747.718910] env[62585]: DEBUG nova.compute.claims [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 747.719111] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.163398] env[62585]: DEBUG nova.compute.utils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 748.168472] env[62585]: DEBUG nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 748.168789] env[62585]: DEBUG nova.network.neutron [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 748.175380] env[62585]: DEBUG oslo_concurrency.lockutils [req-f8d5816b-1da3-46d0-aaf1-ed7528d567dd req-43d1d688-7069-44b6-9be1-544a6d247ec6 service nova] Releasing lock "refresh_cache-b1587330-1740-4bfd-a0c3-a25794c3ccd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.175479] env[62585]: DEBUG nova.compute.manager [req-f8d5816b-1da3-46d0-aaf1-ed7528d567dd req-43d1d688-7069-44b6-9be1-544a6d247ec6 service nova] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Received event network-vif-deleted-db88c093-4462-4886-8542-eb8f0dd49f12 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 748.206090] env[62585]: DEBUG nova.policy [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '846fa76ac8244bc4a9a0d444c4af0d3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9645866ca8f0433cae30cf5867244ca8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 748.409254] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa158c27-2041-4665-a5d0-17020e0ec48e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.416764] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d59668-d165-4143-a9fe-99b8b2228c07 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.447181] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-014d0c2a-1e92-4e00-ae43-dd8a808aa610 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.454111] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a5ed49-1afd-4801-acf8-e95d94c694f0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.466665] env[62585]: DEBUG nova.compute.provider_tree [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.513844] env[62585]: DEBUG nova.network.neutron [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 
tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Successfully created port: 0757c247-2448-4204-a5a6-1e94d33cc383 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.532942] env[62585]: INFO nova.scheduler.client.report [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Deleted allocations for instance 7504c221-2d27-4dc6-9100-9a2dca2a6036 [ 748.669695] env[62585]: DEBUG nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 748.970191] env[62585]: DEBUG nova.scheduler.client.report [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 749.043456] env[62585]: DEBUG oslo_concurrency.lockutils [None req-58882051-44f1-4f87-ba59-799730d4ba84 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "7504c221-2d27-4dc6-9100-9a2dca2a6036" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 122.266s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.270947] env[62585]: DEBUG nova.compute.manager [req-9aa2a0eb-4243-4c7c-b64a-66fcab79fe7c req-797231fb-10cb-48be-92a4-6a0c0e83dfd5 service nova] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Received event network-changed-0757c247-2448-4204-a5a6-1e94d33cc383 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 749.271197] env[62585]: DEBUG nova.compute.manager [req-9aa2a0eb-4243-4c7c-b64a-66fcab79fe7c req-797231fb-10cb-48be-92a4-6a0c0e83dfd5 service nova] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Refreshing instance network info cache due to event network-changed-0757c247-2448-4204-a5a6-1e94d33cc383. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 749.271371] env[62585]: DEBUG oslo_concurrency.lockutils [req-9aa2a0eb-4243-4c7c-b64a-66fcab79fe7c req-797231fb-10cb-48be-92a4-6a0c0e83dfd5 service nova] Acquiring lock "refresh_cache-ad45de09-f60c-4ac5-a4ff-7088d9742d6a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.271509] env[62585]: DEBUG oslo_concurrency.lockutils [req-9aa2a0eb-4243-4c7c-b64a-66fcab79fe7c req-797231fb-10cb-48be-92a4-6a0c0e83dfd5 service nova] Acquired lock "refresh_cache-ad45de09-f60c-4ac5-a4ff-7088d9742d6a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.271661] env[62585]: DEBUG nova.network.neutron [req-9aa2a0eb-4243-4c7c-b64a-66fcab79fe7c req-797231fb-10cb-48be-92a4-6a0c0e83dfd5 service nova] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Refreshing network info cache for port 0757c247-2448-4204-a5a6-1e94d33cc383 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 749.452047] env[62585]: ERROR nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0757c247-2448-4204-a5a6-1e94d33cc383, please check neutron logs for more information. [ 749.452047] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 749.452047] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 749.452047] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 749.452047] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 749.452047] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 749.452047] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 749.452047] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 749.452047] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.452047] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 749.452047] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.452047] env[62585]: ERROR nova.compute.manager raise self.value [ 749.452047] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 749.452047] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 749.452047] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.452047] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 749.452728] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 749.452728] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 749.452728] env[62585]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 0757c247-2448-4204-a5a6-1e94d33cc383, please check neutron logs for more information. [ 749.452728] env[62585]: ERROR nova.compute.manager [ 749.452728] env[62585]: Traceback (most recent call last): [ 749.452728] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 749.452728] env[62585]: listener.cb(fileno) [ 749.452728] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 749.452728] env[62585]: result = function(*args, **kwargs) [ 749.452728] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 749.452728] env[62585]: return func(*args, **kwargs) [ 749.452728] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 749.452728] env[62585]: raise e [ 749.452728] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 749.452728] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 749.452728] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 749.452728] env[62585]: created_port_ids = self._update_ports_for_instance( [ 749.452728] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 749.452728] env[62585]: with excutils.save_and_reraise_exception(): [ 749.452728] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.452728] env[62585]: self.force_reraise() [ 749.452728] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.452728] env[62585]: raise self.value [ 749.452728] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 749.452728] env[62585]: updated_port = self._update_port( [ 749.452728] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.452728] env[62585]: _ensure_no_port_binding_failure(port) [ 749.452728] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 749.452728] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 749.454012] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 0757c247-2448-4204-a5a6-1e94d33cc383, please check neutron logs for more information. [ 749.454012] env[62585]: Removing descriptor: 17 [ 749.475416] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.817s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.476051] env[62585]: ERROR nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4f7145a0-f972-474b-912b-03da34495d70, please check neutron logs for more information. 
[ 749.476051] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Traceback (most recent call last): [ 749.476051] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 749.476051] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] self.driver.spawn(context, instance, image_meta, [ 749.476051] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 749.476051] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] self._vmops.spawn(context, instance, image_meta, injected_files, [ 749.476051] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 749.476051] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] vm_ref = self.build_virtual_machine(instance, [ 749.476051] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 749.476051] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] vif_infos = vmwarevif.get_vif_info(self._session, [ 749.476051] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 749.476428] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] for vif in network_info: [ 749.476428] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 749.476428] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] return self._sync_wrapper(fn, *args, **kwargs) [ 749.476428] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 749.476428] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] self.wait() [ 749.476428] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 749.476428] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] self[:] = self._gt.wait() [ 749.476428] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 749.476428] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] return self._exit_event.wait() [ 749.476428] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 749.476428] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] current.throw(*self._exc) [ 749.476428] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
749.476428] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] result = function(*args, **kwargs) [ 749.476931] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 749.476931] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] return func(*args, **kwargs) [ 749.476931] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 749.476931] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] raise e [ 749.476931] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 749.476931] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] nwinfo = self.network_api.allocate_for_instance( [ 749.476931] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 749.476931] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] created_port_ids = self._update_ports_for_instance( [ 749.476931] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 749.476931] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] with excutils.save_and_reraise_exception(): [ 749.476931] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.476931] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] self.force_reraise() [ 749.476931] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.477620] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] raise self.value [ 749.477620] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 749.477620] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] updated_port = self._update_port( [ 749.477620] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.477620] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] _ensure_no_port_binding_failure(port) [ 749.477620] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 749.477620] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] raise exception.PortBindingFailed(port_id=port['id']) [ 749.477620] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] nova.exception.PortBindingFailed: Binding failed for 
port 4f7145a0-f972-474b-912b-03da34495d70, please check neutron logs for more information. [ 749.477620] env[62585]: ERROR nova.compute.manager [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] [ 749.477620] env[62585]: DEBUG nova.compute.utils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Binding failed for port 4f7145a0-f972-474b-912b-03da34495d70, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 749.477969] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.302s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.479838] env[62585]: DEBUG nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Build of instance a0512ab3-1248-4f38-8ed9-249ba5a2d488 was re-scheduled: Binding failed for port 4f7145a0-f972-474b-912b-03da34495d70, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 749.480105] env[62585]: DEBUG nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 749.480496] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Acquiring lock "refresh_cache-a0512ab3-1248-4f38-8ed9-249ba5a2d488" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.480496] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Acquired lock "refresh_cache-a0512ab3-1248-4f38-8ed9-249ba5a2d488" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.480668] env[62585]: DEBUG nova.network.neutron [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 749.546958] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 749.683167] env[62585]: DEBUG nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 749.708773] env[62585]: DEBUG nova.virt.hardware [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 749.709036] env[62585]: DEBUG nova.virt.hardware [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 749.709195] env[62585]: DEBUG nova.virt.hardware [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.709373] env[62585]: DEBUG nova.virt.hardware [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 749.709517] env[62585]: DEBUG nova.virt.hardware [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.709660] env[62585]: DEBUG nova.virt.hardware [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 749.709856] env[62585]: DEBUG nova.virt.hardware [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 749.710020] env[62585]: DEBUG nova.virt.hardware [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 749.710183] env[62585]: DEBUG nova.virt.hardware [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 749.710340] env[62585]: DEBUG nova.virt.hardware [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 749.710553] env[62585]: DEBUG nova.virt.hardware [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 749.711710] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47400ec9-4fdb-44e0-8140-606399332489 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.719730] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08620154-be78-4a25-9c35-eb38403ed999 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.732930] env[62585]: ERROR nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0757c247-2448-4204-a5a6-1e94d33cc383, please check neutron logs for more information. 
[ 749.732930] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Traceback (most recent call last): [ 749.732930] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 749.732930] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] yield resources [ 749.732930] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 749.732930] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] self.driver.spawn(context, instance, image_meta, [ 749.732930] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 749.732930] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 749.732930] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 749.732930] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] vm_ref = self.build_virtual_machine(instance, [ 749.732930] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 749.733332] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] vif_infos = vmwarevif.get_vif_info(self._session, [ 749.733332] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 749.733332] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] for vif in network_info: [ 749.733332] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 749.733332] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] return self._sync_wrapper(fn, *args, **kwargs) [ 749.733332] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 749.733332] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] self.wait() [ 749.733332] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 749.733332] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] self[:] = self._gt.wait() [ 749.733332] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 749.733332] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] return self._exit_event.wait() [ 749.733332] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 749.733332] env[62585]: ERROR 
nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] current.throw(*self._exc) [ 749.733758] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 749.733758] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] result = function(*args, **kwargs) [ 749.733758] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 749.733758] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] return func(*args, **kwargs) [ 749.733758] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 749.733758] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] raise e [ 749.733758] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 749.733758] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] nwinfo = self.network_api.allocate_for_instance( [ 749.733758] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 749.733758] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] created_port_ids = self._update_ports_for_instance( [ 749.733758] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 749.733758] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] with excutils.save_and_reraise_exception(): [ 749.733758] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.734190] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] self.force_reraise() [ 749.734190] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.734190] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] raise self.value [ 749.734190] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 749.734190] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] updated_port = self._update_port( [ 749.734190] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.734190] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] _ensure_no_port_binding_failure(port) [ 749.734190] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
749.734190] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] raise exception.PortBindingFailed(port_id=port['id']) [ 749.734190] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] nova.exception.PortBindingFailed: Binding failed for port 0757c247-2448-4204-a5a6-1e94d33cc383, please check neutron logs for more information. [ 749.734190] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] [ 749.734190] env[62585]: INFO nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Terminating instance [ 749.735157] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "refresh_cache-ad45de09-f60c-4ac5-a4ff-7088d9742d6a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.789744] env[62585]: DEBUG nova.network.neutron [req-9aa2a0eb-4243-4c7c-b64a-66fcab79fe7c req-797231fb-10cb-48be-92a4-6a0c0e83dfd5 service nova] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.871909] env[62585]: DEBUG nova.network.neutron [req-9aa2a0eb-4243-4c7c-b64a-66fcab79fe7c req-797231fb-10cb-48be-92a4-6a0c0e83dfd5 service nova] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.003279] env[62585]: DEBUG nova.network.neutron [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.070240] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.084814] env[62585]: DEBUG nova.network.neutron [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.374439] env[62585]: DEBUG oslo_concurrency.lockutils [req-9aa2a0eb-4243-4c7c-b64a-66fcab79fe7c req-797231fb-10cb-48be-92a4-6a0c0e83dfd5 service nova] Releasing lock "refresh_cache-ad45de09-f60c-4ac5-a4ff-7088d9742d6a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.374863] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquired lock "refresh_cache-ad45de09-f60c-4ac5-a4ff-7088d9742d6a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.375066] env[62585]: DEBUG nova.network.neutron [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 750.587851] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Releasing lock "refresh_cache-a0512ab3-1248-4f38-8ed9-249ba5a2d488" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.588734] env[62585]: DEBUG nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 750.588734] env[62585]: DEBUG nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 750.588734] env[62585]: DEBUG nova.network.neutron [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 750.603971] env[62585]: DEBUG nova.network.neutron [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.632715] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "e4373e2a-cc21-41b7-be28-9b140ab43247" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.632939] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "e4373e2a-cc21-41b7-be28-9b140ab43247" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.892501] env[62585]: DEBUG nova.network.neutron [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.972714] env[62585]: DEBUG nova.network.neutron [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.009337] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance a0512ab3-1248-4f38-8ed9-249ba5a2d488 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 751.009491] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance d536e668-d597-4f8e-8d61-974e072b48c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 751.009615] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance f03bdd4b-e75e-4d70-84b3-126d2296994f actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 751.009759] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 751.009876] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 751.009988] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance b1587330-1740-4bfd-a0c3-a25794c3ccd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 751.010113] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance ad45de09-f60c-4ac5-a4ff-7088d9742d6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 751.106688] env[62585]: DEBUG nova.network.neutron [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.297840] env[62585]: DEBUG nova.compute.manager [req-e90153e2-4089-462d-ba8b-8780ca5c1014 req-1c91b2c0-2772-4f86-b3e8-c8fa793138ae service nova] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Received event network-vif-deleted-0757c247-2448-4204-a5a6-1e94d33cc383 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 751.475455] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Releasing lock "refresh_cache-ad45de09-f60c-4ac5-a4ff-7088d9742d6a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.475899] env[62585]: DEBUG nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 751.476106] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 751.476414] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73d3f136-79fb-42fc-a288-7d8210e3e2e0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.485845] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99ee1e1-9fac-44ee-89e6-8ed44d084a7e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.507257] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ad45de09-f60c-4ac5-a4ff-7088d9742d6a could not be found. 
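The traceback above ends in nova's _ensure_no_port_binding_failure (nova/network/neutron.py:294) raising PortBindingFailed once Neutron reports the port binding as failed. A minimal, self-contained sketch of that check follows; the 'binding_failed' vif_type sentinel and the cut-down exception class are assumptions for illustration, not code copied from this deployment.

# Sketch of the check named in the traceback above (illustrative only).
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check neutron '
                         'logs for more information.' % port_id)


def _ensure_no_port_binding_failure(port):
    # Neutron returns the port with its binding attributes; a vif_type of
    # 'binding_failed' means the backend could not bind the port.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


try:
    _ensure_no_port_binding_failure({
        'id': '0757c247-2448-4204-a5a6-1e94d33cc383',
        'binding:vif_type': 'binding_failed',
    })
except PortBindingFailed as exc:
    print(exc)  # same message as logged for instance ad45de09-...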
[ 751.507511] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 751.507696] env[62585]: INFO nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 751.507940] env[62585]: DEBUG oslo.service.loopingcall [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 751.508204] env[62585]: DEBUG nova.compute.manager [-] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 751.508264] env[62585]: DEBUG nova.network.neutron [-] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 751.512607] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance dd57237d-875e-453a-b830-749776ce10b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 751.522924] env[62585]: DEBUG nova.network.neutron [-] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.608687] env[62585]: INFO nova.compute.manager [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] [instance: a0512ab3-1248-4f38-8ed9-249ba5a2d488] Took 1.02 seconds to deallocate network for instance. [ 752.015586] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 92168077-2b7e-4355-9880-a2f62674fc7e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 752.027078] env[62585]: DEBUG nova.network.neutron [-] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.518558] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 752.529037] env[62585]: INFO nova.compute.manager [-] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Took 1.02 seconds to deallocate network for instance. [ 752.531621] env[62585]: DEBUG nova.compute.claims [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 752.531850] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.642239] env[62585]: INFO nova.scheduler.client.report [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Deleted allocations for instance a0512ab3-1248-4f38-8ed9-249ba5a2d488 [ 753.022139] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 66af981d-2fa4-4ef4-ac39-3f8f78c543af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.155750] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d9e888d1-17c8-4a08-af9d-2f6e055a3baa tempest-InstanceActionsV221TestJSON-842499034 tempest-InstanceActionsV221TestJSON-842499034-project-member] Lock "a0512ab3-1248-4f38-8ed9-249ba5a2d488" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.686s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.525528] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance c6f0ee10-c5cc-41ad-8b81-f7644921845b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.658166] env[62585]: DEBUG nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 754.029921] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 754.179185] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.535479] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 8a9daa60-e93a-4276-bf23-652ae7b0618b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 755.038062] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 4dfc00d9-64db-439e-baee-041562f7354b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 755.542933] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance e4bd743b-b3a6-4872-9e33-a0183b976292 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 756.045928] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 54e0a14b-cc4f-4445-8d86-f25cc410d7d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 756.549582] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance aed35d7d-f826-4601-aa4e-1d1dccd51d3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 757.052740] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 6483148a-b53d-46b9-8926-07b628f2ea3b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 757.556188] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance e4373e2a-cc21-41b7-be28-9b140ab43247 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 757.556474] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 757.556657] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 757.783861] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1306aa20-167a-46b9-8215-778967f36dac {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.794025] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08248d0a-f5b1-4dc2-ada7-a65d59b85292 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.824607] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8743e7ae-ca31-4225-a38f-427eb8ab6d9b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.831813] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947525b3-48eb-433b-9f36-7b22a253d682 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.844861] env[62585]: DEBUG nova.compute.provider_tree [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed in ProviderTree for provider: 
66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.348324] env[62585]: DEBUG nova.scheduler.client.report [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 758.853321] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62585) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 758.853610] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.376s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.853834] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.153s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.856750] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.856919] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Cleaning up deleted instances {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 759.363774] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] There are 5 instances to clean {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 759.364059] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 2cf0927d-8d98-4554-92ce-c049e1ea179c] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 759.577007] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac685dbf-3c69-4ece-bc54-8fbf33fe0b86 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.584818] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afeb98c-62c6-4b8b-8cb9-351180cb0bc7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.615708] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-67ca49b0-76ef-4253-963d-e8f46945e5e2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.623209] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-241f7dfe-d17e-4bb8-a4db-5e16a64a2509 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.638354] env[62585]: DEBUG nova.compute.provider_tree [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.867201] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 16f01d66-44f8-4912-989a-48c39f667c95] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 760.142632] env[62585]: DEBUG nova.scheduler.client.report [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 760.370168] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 149bd77b-9583-42e5-8c82-f795cac53b87] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 760.647685] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.794s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.648348] env[62585]: ERROR nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 012c7730-176b-4987-8e0f-ba1c9678fb48, please check neutron logs for more information. 
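The "Final resource view" above (used_ram=1664MB, used_disk=5GB, used_vcpus=6) is consistent with the per-instance allocations the resource tracker lists: six actively managed instances at 192 MB / 1 VCPU each, five of them with 1 GB of disk, plus the 512 MB reserved under MEMORY_MB in the inventory data. A quick check, assuming (as the numbers suggest) that the reserved memory is counted into used_ram:

# Recomputing the final resource view from the allocations logged above.
allocations = [
    {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # d536e668-...
    {'MEMORY_MB': 192, 'VCPU': 1},                # f03bdd4b-... (no DISK_GB)
    {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # 84367bf5-...
    {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # 0582ee18-...
    {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # b1587330-...
    {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # ad45de09-...
]
reserved_memory_mb = 512  # 'reserved' under MEMORY_MB in the inventory data

used_ram_mb = reserved_memory_mb + sum(a.get('MEMORY_MB', 0) for a in allocations)
used_disk_gb = sum(a.get('DISK_GB', 0) for a in allocations)
used_vcpus = sum(a.get('VCPU', 0) for a in allocations)

print(used_ram_mb, used_disk_gb, used_vcpus)  # 1664 5 6 -> used_ram=1664MB,
                                              # used_disk=5GB, used_vcpus=6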
[ 760.648348] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Traceback (most recent call last): [ 760.648348] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 760.648348] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] self.driver.spawn(context, instance, image_meta, [ 760.648348] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 760.648348] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 760.648348] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 760.648348] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] vm_ref = self.build_virtual_machine(instance, [ 760.648348] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 760.648348] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] vif_infos = vmwarevif.get_vif_info(self._session, [ 760.648348] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 760.648687] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] for vif in network_info: [ 760.648687] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 760.648687] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] return self._sync_wrapper(fn, *args, **kwargs) [ 760.648687] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 760.648687] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] self.wait() [ 760.648687] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 760.648687] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] self[:] = self._gt.wait() [ 760.648687] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 760.648687] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] return self._exit_event.wait() [ 760.648687] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 760.648687] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] current.throw(*self._exc) [ 760.648687] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
760.648687] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] result = function(*args, **kwargs) [ 760.648995] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 760.648995] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] return func(*args, **kwargs) [ 760.648995] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 760.648995] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] raise e [ 760.648995] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 760.648995] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] nwinfo = self.network_api.allocate_for_instance( [ 760.648995] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 760.648995] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] created_port_ids = self._update_ports_for_instance( [ 760.648995] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 760.648995] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] with excutils.save_and_reraise_exception(): [ 760.648995] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 760.648995] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] self.force_reraise() [ 760.648995] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 760.649321] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] raise self.value [ 760.649321] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 760.649321] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] updated_port = self._update_port( [ 760.649321] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 760.649321] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] _ensure_no_port_binding_failure(port) [ 760.649321] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 760.649321] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] raise exception.PortBindingFailed(port_id=port['id']) [ 760.649321] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] nova.exception.PortBindingFailed: Binding failed for 
port 012c7730-176b-4987-8e0f-ba1c9678fb48, please check neutron logs for more information. [ 760.649321] env[62585]: ERROR nova.compute.manager [instance: d536e668-d597-4f8e-8d61-974e072b48c8] [ 760.649321] env[62585]: DEBUG nova.compute.utils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Binding failed for port 012c7730-176b-4987-8e0f-ba1c9678fb48, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 760.650673] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.359s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.656048] env[62585]: DEBUG nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Build of instance d536e668-d597-4f8e-8d61-974e072b48c8 was re-scheduled: Binding failed for port 012c7730-176b-4987-8e0f-ba1c9678fb48, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 760.656048] env[62585]: DEBUG nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 760.656048] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Acquiring lock "refresh_cache-d536e668-d597-4f8e-8d61-974e072b48c8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.656048] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Acquired lock "refresh_cache-d536e668-d597-4f8e-8d61-974e072b48c8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.656257] env[62585]: DEBUG nova.network.neutron [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 760.874341] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: dd387320-7101-440c-80bc-a7d19a654df8] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 761.177548] env[62585]: DEBUG nova.network.neutron [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: 
d536e668-d597-4f8e-8d61-974e072b48c8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.273168] env[62585]: DEBUG nova.network.neutron [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.378015] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 779efd7e-99d5-4065-8ade-1665533677a4] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 761.384484] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd962e3-b6dd-4762-b2d9-7ce68179c46f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.392408] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1006a899-45ad-4f6f-bb66-4a33a38fbea0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.427202] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db71e83-2885-4122-b1e1-90f202d74ae7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.438048] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b666eb3-9a5c-486d-8a0d-3a075cf6e96b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.453080] env[62585]: DEBUG nova.compute.provider_tree [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.775887] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Releasing lock "refresh_cache-d536e668-d597-4f8e-8d61-974e072b48c8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.776036] env[62585]: DEBUG nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 761.776225] env[62585]: DEBUG nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 761.776391] env[62585]: DEBUG nova.network.neutron [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 761.792372] env[62585]: DEBUG nova.network.neutron [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.880966] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.881389] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Cleaning up deleted instances with incomplete migration {{(pid=62585) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 761.957618] env[62585]: DEBUG nova.scheduler.client.report [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 762.295346] env[62585]: DEBUG nova.network.neutron [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.383802] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.462211] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.811s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.462846] env[62585]: ERROR nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port eb8156a4-acac-4527-b0b7-6945e4b585b2, please check neutron logs for more information. [ 762.462846] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Traceback (most recent call last): [ 762.462846] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 762.462846] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] self.driver.spawn(context, instance, image_meta, [ 762.462846] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 762.462846] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 762.462846] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 762.462846] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] vm_ref = self.build_virtual_machine(instance, [ 762.462846] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 762.462846] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] vif_infos = vmwarevif.get_vif_info(self._session, [ 762.462846] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 762.463127] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] for vif in network_info: [ 762.463127] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 762.463127] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] return self._sync_wrapper(fn, *args, **kwargs) [ 762.463127] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 762.463127] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] self.wait() [ 762.463127] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 762.463127] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] self[:] = self._gt.wait() [ 762.463127] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 762.463127] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] return self._exit_event.wait() [ 
762.463127] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 762.463127] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] current.throw(*self._exc) [ 762.463127] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 762.463127] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] result = function(*args, **kwargs) [ 762.463490] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 762.463490] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] return func(*args, **kwargs) [ 762.463490] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 762.463490] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] raise e [ 762.463490] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 762.463490] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] nwinfo = self.network_api.allocate_for_instance( [ 762.463490] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 762.463490] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] created_port_ids = self._update_ports_for_instance( [ 762.463490] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 762.463490] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] with excutils.save_and_reraise_exception(): [ 762.463490] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 762.463490] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] self.force_reraise() [ 762.463490] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 762.463802] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] raise self.value [ 762.463802] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 762.463802] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] updated_port = self._update_port( [ 762.463802] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 762.463802] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] 
_ensure_no_port_binding_failure(port) [ 762.463802] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 762.463802] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] raise exception.PortBindingFailed(port_id=port['id']) [ 762.463802] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] nova.exception.PortBindingFailed: Binding failed for port eb8156a4-acac-4527-b0b7-6945e4b585b2, please check neutron logs for more information. [ 762.463802] env[62585]: ERROR nova.compute.manager [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] [ 762.463802] env[62585]: DEBUG nova.compute.utils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Binding failed for port eb8156a4-acac-4527-b0b7-6945e4b585b2, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 762.464761] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.124s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.467523] env[62585]: DEBUG nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Build of instance f03bdd4b-e75e-4d70-84b3-126d2296994f was re-scheduled: Binding failed for port eb8156a4-acac-4527-b0b7-6945e4b585b2, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 762.467925] env[62585]: DEBUG nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 762.468157] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Acquiring lock "refresh_cache-f03bdd4b-e75e-4d70-84b3-126d2296994f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.468302] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Acquired lock "refresh_cache-f03bdd4b-e75e-4d70-84b3-126d2296994f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.468458] env[62585]: DEBUG nova.network.neutron [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 762.798483] env[62585]: INFO nova.compute.manager [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] [instance: d536e668-d597-4f8e-8d61-974e072b48c8] Took 1.02 seconds to deallocate network for instance. [ 762.989023] env[62585]: DEBUG nova.network.neutron [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.083984] env[62585]: DEBUG nova.network.neutron [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.180136] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc1d61b-441e-4b1e-8074-c49eabea633d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.188066] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b0769e-a392-4430-9833-11b73baca293 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.218262] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bbe78a3-0e66-4e72-bc2e-fd643ce7bbdf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.225338] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b389cd4e-ca77-43d1-822a-a0a40a6cf361 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.238488] env[62585]: DEBUG nova.compute.provider_tree [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.587046] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Releasing lock "refresh_cache-f03bdd4b-e75e-4d70-84b3-126d2296994f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.587135] env[62585]: DEBUG nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 763.587320] env[62585]: DEBUG nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 763.587519] env[62585]: DEBUG nova.network.neutron [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 763.603951] env[62585]: DEBUG nova.network.neutron [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.741867] env[62585]: DEBUG nova.scheduler.client.report [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 763.825221] env[62585]: INFO nova.scheduler.client.report [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Deleted allocations for instance d536e668-d597-4f8e-8d61-974e072b48c8 [ 764.107513] env[62585]: DEBUG nova.network.neutron [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.246178] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.781s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.246626] env[62585]: ERROR nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c55cb775-d8a7-4215-ab7d-75f240422742, please check neutron logs for more information. 
[ 764.246626] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Traceback (most recent call last): [ 764.246626] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 764.246626] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] self.driver.spawn(context, instance, image_meta, [ 764.246626] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 764.246626] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 764.246626] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 764.246626] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] vm_ref = self.build_virtual_machine(instance, [ 764.246626] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 764.246626] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] vif_infos = vmwarevif.get_vif_info(self._session, [ 764.246626] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 764.246863] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] for vif in network_info: [ 764.246863] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 764.246863] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] return self._sync_wrapper(fn, *args, **kwargs) [ 764.246863] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 764.246863] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] self.wait() [ 764.246863] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 764.246863] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] self[:] = self._gt.wait() [ 764.246863] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 764.246863] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] return self._exit_event.wait() [ 764.246863] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 764.246863] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] current.throw(*self._exc) [ 764.246863] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
764.246863] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] result = function(*args, **kwargs) [ 764.247128] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 764.247128] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] return func(*args, **kwargs) [ 764.247128] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 764.247128] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] raise e [ 764.247128] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 764.247128] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] nwinfo = self.network_api.allocate_for_instance( [ 764.247128] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 764.247128] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] created_port_ids = self._update_ports_for_instance( [ 764.247128] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 764.247128] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] with excutils.save_and_reraise_exception(): [ 764.247128] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 764.247128] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] self.force_reraise() [ 764.247128] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 764.247400] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] raise self.value [ 764.247400] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 764.247400] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] updated_port = self._update_port( [ 764.247400] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 764.247400] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] _ensure_no_port_binding_failure(port) [ 764.247400] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 764.247400] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] raise exception.PortBindingFailed(port_id=port['id']) [ 764.247400] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] nova.exception.PortBindingFailed: Binding failed for 
port c55cb775-d8a7-4215-ab7d-75f240422742, please check neutron logs for more information. [ 764.247400] env[62585]: ERROR nova.compute.manager [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] [ 764.247400] env[62585]: DEBUG nova.compute.utils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Binding failed for port c55cb775-d8a7-4215-ab7d-75f240422742, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 764.248904] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.584s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.250444] env[62585]: INFO nova.compute.claims [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 764.253176] env[62585]: DEBUG nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Build of instance 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0 was re-scheduled: Binding failed for port c55cb775-d8a7-4215-ab7d-75f240422742, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 764.253618] env[62585]: DEBUG nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 764.253857] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Acquiring lock "refresh_cache-84367bf5-0f74-43c5-b49c-e0f4dde5b1d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.253982] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Acquired lock "refresh_cache-84367bf5-0f74-43c5-b49c-e0f4dde5b1d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.254148] env[62585]: DEBUG nova.network.neutron [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 764.336270] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9b4f3172-8e60-4f03-8776-7970dcf64035 tempest-ServerGroupTestJSON-126064004 tempest-ServerGroupTestJSON-126064004-project-member] Lock "d536e668-d597-4f8e-8d61-974e072b48c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 135.370s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.611388] env[62585]: INFO nova.compute.manager [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] [instance: f03bdd4b-e75e-4d70-84b3-126d2296994f] Took 1.02 seconds to deallocate network for instance. [ 764.787138] env[62585]: DEBUG nova.network.neutron [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.841244] env[62585]: DEBUG nova.compute.manager [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 764.854597] env[62585]: DEBUG nova.network.neutron [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.357194] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Releasing lock "refresh_cache-84367bf5-0f74-43c5-b49c-e0f4dde5b1d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.359832] env[62585]: DEBUG nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 765.359832] env[62585]: DEBUG nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 765.359832] env[62585]: DEBUG nova.network.neutron [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 765.381095] env[62585]: DEBUG nova.network.neutron [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.420345] env[62585]: DEBUG oslo_concurrency.lockutils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.647891] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b0b383-0c0b-4af7-b3bc-a34368b398d2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.659433] env[62585]: INFO nova.scheduler.client.report [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Deleted allocations for instance f03bdd4b-e75e-4d70-84b3-126d2296994f [ 765.666099] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22f603e-4c67-41f9-b125-163338637610 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.702395] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccb73d7-c19d-459d-a745-e6377b4df564 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.710463] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ce8cab-1b0c-4e8b-ac9f-94a73576162e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.725608] env[62585]: DEBUG nova.compute.provider_tree [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.883395] env[62585]: DEBUG nova.network.neutron [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.173099] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f8525e2-b04a-4358-b2a3-b67f9856f849 tempest-ServerActionsV293TestJSON-1952186505 tempest-ServerActionsV293TestJSON-1952186505-project-member] Lock "f03bdd4b-e75e-4d70-84b3-126d2296994f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 135.498s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.229264] env[62585]: DEBUG nova.scheduler.client.report [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 766.389811] env[62585]: INFO nova.compute.manager [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] [instance: 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0] Took 1.03 seconds to deallocate network for instance. [ 766.676159] env[62585]: DEBUG nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 766.737104] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.737659] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 766.740091] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.250s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.741437] env[62585]: INFO nova.compute.claims [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 767.202268] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.246201] env[62585]: DEBUG nova.compute.utils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 767.250380] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 
dd57237d-875e-453a-b830-749776ce10b4] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 767.251269] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 767.310208] env[62585]: DEBUG nova.policy [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc3a99a607fe490fbff954dd2cf89914', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd40ca08c01ef475b82b42c8f7384791a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 767.428287] env[62585]: INFO nova.scheduler.client.report [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Deleted allocations for instance 84367bf5-0f74-43c5-b49c-e0f4dde5b1d0 [ 767.751379] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 767.939269] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1da2cd9a-09ea-4a66-97c4-5a872f123a89 tempest-ServerDiagnosticsNegativeTest-1551284821 tempest-ServerDiagnosticsNegativeTest-1551284821-project-member] Lock "84367bf5-0f74-43c5-b49c-e0f4dde5b1d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 128.772s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.962801] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5117fce-1ea7-4b08-8064-91c9686df7eb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.971347] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51e8b93-2056-4218-8f4d-c7f359a8aa1a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.974262] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Successfully created port: 895bbcc2-1399-4c22-84c8-92c9e1795257 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 768.002821] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce17cb22-9999-42be-bb41-515b9c310ab3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.009911] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85907e48-fb04-4057-a25d-ce5527ac200d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.023260] env[62585]: DEBUG nova.compute.provider_tree [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.442746] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 768.526471] env[62585]: DEBUG nova.scheduler.client.report [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 768.768107] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 768.797935] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 768.798211] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 768.798366] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 768.798548] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 768.798816] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 768.798879] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 768.799087] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 768.799288] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 768.799461] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 768.799739] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 768.799798] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 768.801369] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8e7682-b5d0-4897-8c74-69c1f857dfca {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.810499] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e42d08-6bf4-4070-83f2-390db2971815 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.977163] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.034743] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.295s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.035294] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 769.038094] env[62585]: DEBUG oslo_concurrency.lockutils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.741s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.365947] env[62585]: DEBUG nova.compute.manager [req-066c8d66-0f78-4f06-ba10-c719d1eff887 req-6685d8a0-289f-4925-aedb-99ff9ed4bc6e service nova] [instance: dd57237d-875e-453a-b830-749776ce10b4] Received event network-changed-895bbcc2-1399-4c22-84c8-92c9e1795257 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 769.365947] env[62585]: DEBUG nova.compute.manager [req-066c8d66-0f78-4f06-ba10-c719d1eff887 req-6685d8a0-289f-4925-aedb-99ff9ed4bc6e service nova] [instance: dd57237d-875e-453a-b830-749776ce10b4] Refreshing instance network info cache due to event network-changed-895bbcc2-1399-4c22-84c8-92c9e1795257. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 769.367500] env[62585]: DEBUG oslo_concurrency.lockutils [req-066c8d66-0f78-4f06-ba10-c719d1eff887 req-6685d8a0-289f-4925-aedb-99ff9ed4bc6e service nova] Acquiring lock "refresh_cache-dd57237d-875e-453a-b830-749776ce10b4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.371023] env[62585]: DEBUG oslo_concurrency.lockutils [req-066c8d66-0f78-4f06-ba10-c719d1eff887 req-6685d8a0-289f-4925-aedb-99ff9ed4bc6e service nova] Acquired lock "refresh_cache-dd57237d-875e-453a-b830-749776ce10b4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.371023] env[62585]: DEBUG nova.network.neutron [req-066c8d66-0f78-4f06-ba10-c719d1eff887 req-6685d8a0-289f-4925-aedb-99ff9ed4bc6e service nova] [instance: dd57237d-875e-453a-b830-749776ce10b4] Refreshing network info cache for port 895bbcc2-1399-4c22-84c8-92c9e1795257 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 770.154226] env[62585]: ERROR nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 895bbcc2-1399-4c22-84c8-92c9e1795257, please check neutron logs for more information. 
[ 770.154226] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 770.154226] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 770.154226] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 770.154226] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 770.154226] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 770.154226] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 770.154226] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 770.154226] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.154226] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 770.154226] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.154226] env[62585]: ERROR nova.compute.manager raise self.value [ 770.154226] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 770.154226] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 770.154226] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.154226] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 770.154795] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 770.154795] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 770.154795] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 895bbcc2-1399-4c22-84c8-92c9e1795257, please check neutron logs for more information. 
[ 770.154795] env[62585]: ERROR nova.compute.manager [ 770.154795] env[62585]: Traceback (most recent call last): [ 770.154795] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 770.154795] env[62585]: listener.cb(fileno) [ 770.154795] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 770.154795] env[62585]: result = function(*args, **kwargs) [ 770.154795] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 770.154795] env[62585]: return func(*args, **kwargs) [ 770.154795] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 770.154795] env[62585]: raise e [ 770.154795] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 770.154795] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 770.154795] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 770.154795] env[62585]: created_port_ids = self._update_ports_for_instance( [ 770.154795] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 770.154795] env[62585]: with excutils.save_and_reraise_exception(): [ 770.154795] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.154795] env[62585]: self.force_reraise() [ 770.154795] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.154795] env[62585]: raise self.value [ 770.154795] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 770.154795] env[62585]: updated_port = self._update_port( [ 770.154795] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.154795] env[62585]: _ensure_no_port_binding_failure(port) [ 770.154795] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 770.154795] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 770.155409] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 895bbcc2-1399-4c22-84c8-92c9e1795257, please check neutron logs for more information. [ 770.155409] env[62585]: Removing descriptor: 17 [ 770.155870] env[62585]: DEBUG nova.compute.utils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 770.161478] env[62585]: ERROR nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 895bbcc2-1399-4c22-84c8-92c9e1795257, please check neutron logs for more information. 
[ 770.161478] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] Traceback (most recent call last): [ 770.161478] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 770.161478] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] yield resources [ 770.161478] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 770.161478] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] self.driver.spawn(context, instance, image_meta, [ 770.161478] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 770.161478] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 770.161478] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 770.161478] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] vm_ref = self.build_virtual_machine(instance, [ 770.161478] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 770.161834] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 770.161834] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 770.161834] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] for vif in network_info: [ 770.161834] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 770.161834] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] return self._sync_wrapper(fn, *args, **kwargs) [ 770.161834] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 770.161834] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] self.wait() [ 770.161834] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 770.161834] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] self[:] = self._gt.wait() [ 770.161834] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 770.161834] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] return self._exit_event.wait() [ 770.161834] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 770.161834] env[62585]: ERROR 
nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] result = hub.switch() [ 770.162269] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 770.162269] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] return self.greenlet.switch() [ 770.162269] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 770.162269] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] result = function(*args, **kwargs) [ 770.162269] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 770.162269] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] return func(*args, **kwargs) [ 770.162269] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 770.162269] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] raise e [ 770.162269] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 770.162269] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] nwinfo = self.network_api.allocate_for_instance( [ 770.162269] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 770.162269] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] created_port_ids = self._update_ports_for_instance( [ 770.162269] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 770.162575] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] with excutils.save_and_reraise_exception(): [ 770.162575] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.162575] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] self.force_reraise() [ 770.162575] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.162575] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] raise self.value [ 770.162575] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 770.162575] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] updated_port = self._update_port( [ 770.162575] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.162575] 
env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] _ensure_no_port_binding_failure(port) [ 770.162575] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 770.162575] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] raise exception.PortBindingFailed(port_id=port['id']) [ 770.162575] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] nova.exception.PortBindingFailed: Binding failed for port 895bbcc2-1399-4c22-84c8-92c9e1795257, please check neutron logs for more information. [ 770.162575] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] [ 770.162863] env[62585]: INFO nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Terminating instance [ 770.163815] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 770.163932] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 770.167124] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquiring lock "refresh_cache-dd57237d-875e-453a-b830-749776ce10b4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.183564] env[62585]: DEBUG nova.network.neutron [req-066c8d66-0f78-4f06-ba10-c719d1eff887 req-6685d8a0-289f-4925-aedb-99ff9ed4bc6e service nova] [instance: dd57237d-875e-453a-b830-749776ce10b4] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.223172] env[62585]: DEBUG nova.policy [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc3a99a607fe490fbff954dd2cf89914', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd40ca08c01ef475b82b42c8f7384791a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 770.312942] env[62585]: DEBUG nova.network.neutron [req-066c8d66-0f78-4f06-ba10-c719d1eff887 req-6685d8a0-289f-4925-aedb-99ff9ed4bc6e service nova] [instance: dd57237d-875e-453a-b830-749776ce10b4] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.457242] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1cb8cc5-857a-4519-b9b9-9436dc5d887a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.465325] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53c37d4-98eb-4f44-b57f-dda20c046967 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.493980] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca9c06e-84ed-4a50-a856-967d96c223dd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.501378] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4241579f-2aee-49a8-9d0b-87a639d8aa6d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.516819] env[62585]: DEBUG nova.compute.provider_tree [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.559709] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Successfully created port: 795912b3-b7d8-41c0-8e52-65af33e6e0e8 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 770.664495] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 770.818330] env[62585]: DEBUG oslo_concurrency.lockutils [req-066c8d66-0f78-4f06-ba10-c719d1eff887 req-6685d8a0-289f-4925-aedb-99ff9ed4bc6e service nova] Releasing lock "refresh_cache-dd57237d-875e-453a-b830-749776ce10b4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.818732] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquired lock "refresh_cache-dd57237d-875e-453a-b830-749776ce10b4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.818990] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 770.893850] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "d207fb66-ad23-47a5-a304-ecf885de4ced" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.894131] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "d207fb66-ad23-47a5-a304-ecf885de4ced" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.019524] env[62585]: DEBUG nova.scheduler.client.report [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 771.340158] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.428374] env[62585]: DEBUG nova.compute.manager [req-e57f9c34-2423-4ad7-94f8-3ef9ab0c16cc req-6033f974-f008-4b8a-abd5-5874f67e152c service nova] [instance: dd57237d-875e-453a-b830-749776ce10b4] Received event network-vif-deleted-895bbcc2-1399-4c22-84c8-92c9e1795257 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 771.436449] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.455735] env[62585]: DEBUG nova.compute.manager [req-09c1a32c-51d3-4166-86bb-d62f903500da req-4f0cec1a-3817-43b0-a766-79b9b1a1fe25 service nova] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Received event network-changed-795912b3-b7d8-41c0-8e52-65af33e6e0e8 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 771.456022] env[62585]: DEBUG nova.compute.manager [req-09c1a32c-51d3-4166-86bb-d62f903500da req-4f0cec1a-3817-43b0-a766-79b9b1a1fe25 service nova] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Refreshing instance network info cache due to event network-changed-795912b3-b7d8-41c0-8e52-65af33e6e0e8. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 771.456242] env[62585]: DEBUG oslo_concurrency.lockutils [req-09c1a32c-51d3-4166-86bb-d62f903500da req-4f0cec1a-3817-43b0-a766-79b9b1a1fe25 service nova] Acquiring lock "refresh_cache-92168077-2b7e-4355-9880-a2f62674fc7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 771.456383] env[62585]: DEBUG oslo_concurrency.lockutils [req-09c1a32c-51d3-4166-86bb-d62f903500da req-4f0cec1a-3817-43b0-a766-79b9b1a1fe25 service nova] Acquired lock "refresh_cache-92168077-2b7e-4355-9880-a2f62674fc7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.456540] env[62585]: DEBUG nova.network.neutron [req-09c1a32c-51d3-4166-86bb-d62f903500da req-4f0cec1a-3817-43b0-a766-79b9b1a1fe25 service nova] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Refreshing network info cache for port 795912b3-b7d8-41c0-8e52-65af33e6e0e8 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 771.524376] env[62585]: DEBUG oslo_concurrency.lockutils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.486s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.525116] env[62585]: ERROR nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 914d5549-a112-41c2-9c00-2fd475dd5265, please check neutron logs for more information. 
[ 771.525116] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Traceback (most recent call last): [ 771.525116] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 771.525116] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] self.driver.spawn(context, instance, image_meta, [ 771.525116] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 771.525116] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 771.525116] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 771.525116] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] vm_ref = self.build_virtual_machine(instance, [ 771.525116] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 771.525116] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] vif_infos = vmwarevif.get_vif_info(self._session, [ 771.525116] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 771.525596] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] for vif in network_info: [ 771.525596] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 771.525596] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] return self._sync_wrapper(fn, *args, **kwargs) [ 771.525596] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 771.525596] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] self.wait() [ 771.525596] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 771.525596] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] self[:] = self._gt.wait() [ 771.525596] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 771.525596] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] return self._exit_event.wait() [ 771.525596] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 771.525596] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] current.throw(*self._exc) [ 771.525596] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
771.525596] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] result = function(*args, **kwargs) [ 771.526185] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 771.526185] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] return func(*args, **kwargs) [ 771.526185] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 771.526185] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] raise e [ 771.526185] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 771.526185] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] nwinfo = self.network_api.allocate_for_instance( [ 771.526185] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 771.526185] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] created_port_ids = self._update_ports_for_instance( [ 771.526185] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 771.526185] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] with excutils.save_and_reraise_exception(): [ 771.526185] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 771.526185] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] self.force_reraise() [ 771.526185] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 771.526980] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] raise self.value [ 771.526980] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 771.526980] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] updated_port = self._update_port( [ 771.526980] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 771.526980] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] _ensure_no_port_binding_failure(port) [ 771.526980] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 771.526980] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] raise exception.PortBindingFailed(port_id=port['id']) [ 771.526980] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] nova.exception.PortBindingFailed: Binding failed for 
port 914d5549-a112-41c2-9c00-2fd475dd5265, please check neutron logs for more information. [ 771.526980] env[62585]: ERROR nova.compute.manager [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] [ 771.526980] env[62585]: DEBUG nova.compute.utils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Binding failed for port 914d5549-a112-41c2-9c00-2fd475dd5265, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 771.528466] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.809s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.531649] env[62585]: DEBUG nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Build of instance 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5 was re-scheduled: Binding failed for port 914d5549-a112-41c2-9c00-2fd475dd5265, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 771.532375] env[62585]: DEBUG nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 771.532597] env[62585]: DEBUG oslo_concurrency.lockutils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "refresh_cache-0582ee18-80b2-48d0-9d8d-e82d1b9d60c5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 771.532836] env[62585]: DEBUG oslo_concurrency.lockutils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "refresh_cache-0582ee18-80b2-48d0-9d8d-e82d1b9d60c5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.532899] env[62585]: DEBUG nova.network.neutron [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 771.647913] env[62585]: ERROR nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 795912b3-b7d8-41c0-8e52-65af33e6e0e8, please check neutron logs for more information. 
[ 771.647913] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 771.647913] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 771.647913] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 771.647913] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 771.647913] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 771.647913] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 771.647913] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 771.647913] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 771.647913] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 771.647913] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 771.647913] env[62585]: ERROR nova.compute.manager raise self.value [ 771.647913] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 771.647913] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 771.647913] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 771.647913] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 771.648569] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 771.648569] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 771.648569] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 795912b3-b7d8-41c0-8e52-65af33e6e0e8, please check neutron logs for more information. 
[ 771.648569] env[62585]: ERROR nova.compute.manager [ 771.648569] env[62585]: Traceback (most recent call last): [ 771.648569] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 771.648569] env[62585]: listener.cb(fileno) [ 771.648569] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 771.648569] env[62585]: result = function(*args, **kwargs) [ 771.648569] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 771.648569] env[62585]: return func(*args, **kwargs) [ 771.648569] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 771.648569] env[62585]: raise e [ 771.648569] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 771.648569] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 771.648569] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 771.648569] env[62585]: created_port_ids = self._update_ports_for_instance( [ 771.648569] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 771.648569] env[62585]: with excutils.save_and_reraise_exception(): [ 771.648569] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 771.648569] env[62585]: self.force_reraise() [ 771.648569] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 771.648569] env[62585]: raise self.value [ 771.648569] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 771.648569] env[62585]: updated_port = self._update_port( [ 771.648569] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 771.648569] env[62585]: _ensure_no_port_binding_failure(port) [ 771.648569] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 771.648569] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 771.649401] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 795912b3-b7d8-41c0-8e52-65af33e6e0e8, please check neutron logs for more information. [ 771.649401] env[62585]: Removing descriptor: 15 [ 771.676690] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 771.702012] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 771.702305] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 771.702457] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 771.702637] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 771.702782] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 771.702925] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 771.703145] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 771.703300] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 771.703462] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 771.703617] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 771.703781] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 771.704977] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8326e806-6aee-43ee-8a9c-59ded7e119e7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.713611] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1fe73ff-89c8-4456-95fb-a20c1af04e83 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.727590] env[62585]: ERROR nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 795912b3-b7d8-41c0-8e52-65af33e6e0e8, please check neutron logs for more information. 
[ 771.727590] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Traceback (most recent call last): [ 771.727590] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 771.727590] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] yield resources [ 771.727590] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 771.727590] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] self.driver.spawn(context, instance, image_meta, [ 771.727590] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 771.727590] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 771.727590] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 771.727590] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] vm_ref = self.build_virtual_machine(instance, [ 771.727590] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 771.727924] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] vif_infos = vmwarevif.get_vif_info(self._session, [ 771.727924] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 771.727924] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] for vif in network_info: [ 771.727924] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 771.727924] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] return self._sync_wrapper(fn, *args, **kwargs) [ 771.727924] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 771.727924] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] self.wait() [ 771.727924] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 771.727924] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] self[:] = self._gt.wait() [ 771.727924] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 771.727924] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] return self._exit_event.wait() [ 771.727924] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 771.727924] env[62585]: ERROR 
nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] current.throw(*self._exc) [ 771.728417] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 771.728417] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] result = function(*args, **kwargs) [ 771.728417] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 771.728417] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] return func(*args, **kwargs) [ 771.728417] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 771.728417] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] raise e [ 771.728417] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 771.728417] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] nwinfo = self.network_api.allocate_for_instance( [ 771.728417] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 771.728417] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] created_port_ids = self._update_ports_for_instance( [ 771.728417] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 771.728417] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] with excutils.save_and_reraise_exception(): [ 771.728417] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 771.728686] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] self.force_reraise() [ 771.728686] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 771.728686] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] raise self.value [ 771.728686] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 771.728686] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] updated_port = self._update_port( [ 771.728686] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 771.728686] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] _ensure_no_port_binding_failure(port) [ 771.728686] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
771.728686] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] raise exception.PortBindingFailed(port_id=port['id']) [ 771.728686] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] nova.exception.PortBindingFailed: Binding failed for port 795912b3-b7d8-41c0-8e52-65af33e6e0e8, please check neutron logs for more information. [ 771.728686] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] [ 771.728686] env[62585]: INFO nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Terminating instance [ 771.731789] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquiring lock "refresh_cache-92168077-2b7e-4355-9880-a2f62674fc7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 771.939412] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Releasing lock "refresh_cache-dd57237d-875e-453a-b830-749776ce10b4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.939888] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 771.940093] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 771.940397] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c294619f-6317-4e36-b480-15fec6325568 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.950471] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633439af-3fe5-4c4e-ba8f-83a44c46e4c8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.975727] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance dd57237d-875e-453a-b830-749776ce10b4 could not be found. 
[ 771.975934] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 771.976126] env[62585]: INFO nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 771.976359] env[62585]: DEBUG oslo.service.loopingcall [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 771.976576] env[62585]: DEBUG nova.compute.manager [-] [instance: dd57237d-875e-453a-b830-749776ce10b4] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 771.976664] env[62585]: DEBUG nova.network.neutron [-] [instance: dd57237d-875e-453a-b830-749776ce10b4] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 771.981300] env[62585]: DEBUG nova.network.neutron [req-09c1a32c-51d3-4166-86bb-d62f903500da req-4f0cec1a-3817-43b0-a766-79b9b1a1fe25 service nova] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.991593] env[62585]: DEBUG nova.network.neutron [-] [instance: dd57237d-875e-453a-b830-749776ce10b4] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.053740] env[62585]: DEBUG nova.network.neutron [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.132741] env[62585]: DEBUG nova.network.neutron [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.267955] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94aa7c43-a027-46ea-bd52-d883e0fcb2c5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.279550] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d276c2-990c-4a10-915b-c4273039fa71 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.288152] env[62585]: DEBUG nova.network.neutron [req-09c1a32c-51d3-4166-86bb-d62f903500da req-4f0cec1a-3817-43b0-a766-79b9b1a1fe25 service nova] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.324868] env[62585]: DEBUG oslo_concurrency.lockutils [req-09c1a32c-51d3-4166-86bb-d62f903500da req-4f0cec1a-3817-43b0-a766-79b9b1a1fe25 service nova] Releasing lock "refresh_cache-92168077-2b7e-4355-9880-a2f62674fc7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.324868] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508e3264-e893-4a07-b4da-01e51da834d2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.327657] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquired lock "refresh_cache-92168077-2b7e-4355-9880-a2f62674fc7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.327957] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 772.336781] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6080aee2-d614-4713-8d48-a47b7408cb99 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.352255] env[62585]: DEBUG nova.compute.provider_tree [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.494825] env[62585]: DEBUG nova.network.neutron [-] [instance: dd57237d-875e-453a-b830-749776ce10b4] Updating instance_info_cache with 
network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.635174] env[62585]: DEBUG oslo_concurrency.lockutils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "refresh_cache-0582ee18-80b2-48d0-9d8d-e82d1b9d60c5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.635697] env[62585]: DEBUG nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 772.635697] env[62585]: DEBUG nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 772.635852] env[62585]: DEBUG nova.network.neutron [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 772.652404] env[62585]: DEBUG nova.network.neutron [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.855968] env[62585]: DEBUG nova.scheduler.client.report [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 772.865152] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.998158] env[62585]: INFO nova.compute.manager [-] [instance: dd57237d-875e-453a-b830-749776ce10b4] Took 1.02 seconds to deallocate network for instance. 
[ 773.001384] env[62585]: DEBUG nova.compute.claims [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 773.001384] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.019597] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.158183] env[62585]: DEBUG nova.network.neutron [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.373295] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.843s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.373295] env[62585]: ERROR nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port db88c093-4462-4886-8542-eb8f0dd49f12, please check neutron logs for more information. 
[ 773.373295] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Traceback (most recent call last): [ 773.373295] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 773.373295] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] self.driver.spawn(context, instance, image_meta, [ 773.373295] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 773.373295] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 773.373295] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 773.373295] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] vm_ref = self.build_virtual_machine(instance, [ 773.373857] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 773.373857] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] vif_infos = vmwarevif.get_vif_info(self._session, [ 773.373857] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 773.373857] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] for vif in network_info: [ 773.373857] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 773.373857] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] return self._sync_wrapper(fn, *args, **kwargs) [ 773.373857] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 773.373857] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] self.wait() [ 773.373857] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 773.373857] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] self[:] = self._gt.wait() [ 773.373857] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 773.373857] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] return self._exit_event.wait() [ 773.373857] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 773.374148] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] current.throw(*self._exc) [ 773.374148] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
773.374148] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] result = function(*args, **kwargs) [ 773.374148] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 773.374148] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] return func(*args, **kwargs) [ 773.374148] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 773.374148] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] raise e [ 773.374148] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 773.374148] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] nwinfo = self.network_api.allocate_for_instance( [ 773.374148] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 773.374148] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] created_port_ids = self._update_ports_for_instance( [ 773.374148] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 773.374148] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] with excutils.save_and_reraise_exception(): [ 773.374472] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 773.374472] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] self.force_reraise() [ 773.374472] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 773.374472] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] raise self.value [ 773.374472] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 773.374472] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] updated_port = self._update_port( [ 773.374472] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 773.374472] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] _ensure_no_port_binding_failure(port) [ 773.374472] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 773.374472] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] raise exception.PortBindingFailed(port_id=port['id']) [ 773.374472] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] nova.exception.PortBindingFailed: Binding failed for 
port db88c093-4462-4886-8542-eb8f0dd49f12, please check neutron logs for more information. [ 773.374472] env[62585]: ERROR nova.compute.manager [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] [ 773.374735] env[62585]: DEBUG nova.compute.utils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Binding failed for port db88c093-4462-4886-8542-eb8f0dd49f12, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 773.376563] env[62585]: DEBUG nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Build of instance b1587330-1740-4bfd-a0c3-a25794c3ccd3 was re-scheduled: Binding failed for port db88c093-4462-4886-8542-eb8f0dd49f12, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 773.377167] env[62585]: DEBUG nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 773.378827] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Acquiring lock "refresh_cache-b1587330-1740-4bfd-a0c3-a25794c3ccd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.378827] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Acquired lock "refresh_cache-b1587330-1740-4bfd-a0c3-a25794c3ccd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.378827] env[62585]: DEBUG nova.network.neutron [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 773.379954] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.310s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.381599] env[62585]: INFO nova.compute.claims [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 773.479220] env[62585]: DEBUG 
nova.compute.manager [req-c8e0d979-2521-42c6-b796-0adb290e3ad1 req-97bf737a-9302-4152-a6c3-a372bc04c88d service nova] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Received event network-vif-deleted-795912b3-b7d8-41c0-8e52-65af33e6e0e8 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 773.528020] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Releasing lock "refresh_cache-92168077-2b7e-4355-9880-a2f62674fc7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.528020] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 773.528020] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 773.528020] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7b945c39-a535-432c-bf42-563322de49d4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.540215] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03612bf-2bc4-4682-b572-cf769333667e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.565021] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 92168077-2b7e-4355-9880-a2f62674fc7e could not be found. [ 773.565021] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 773.565021] env[62585]: INFO nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 773.565021] env[62585]: DEBUG oslo.service.loopingcall [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 773.565021] env[62585]: DEBUG nova.compute.manager [-] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 773.565021] env[62585]: DEBUG nova.network.neutron [-] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 773.587981] env[62585]: DEBUG nova.network.neutron [-] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.660528] env[62585]: INFO nova.compute.manager [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5] Took 1.02 seconds to deallocate network for instance. [ 773.895561] env[62585]: DEBUG nova.network.neutron [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.000521] env[62585]: DEBUG nova.network.neutron [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.090215] env[62585]: DEBUG nova.network.neutron [-] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.503560] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Releasing lock "refresh_cache-b1587330-1740-4bfd-a0c3-a25794c3ccd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.503871] env[62585]: DEBUG nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 774.503974] env[62585]: DEBUG nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 774.504162] env[62585]: DEBUG nova.network.neutron [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 774.537049] env[62585]: DEBUG nova.network.neutron [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.592108] env[62585]: INFO nova.compute.manager [-] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Took 1.03 seconds to deallocate network for instance. [ 774.595468] env[62585]: DEBUG nova.compute.claims [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 774.595753] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.636091] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f685dd88-0ca3-4df6-bb1a-8a2d3f25bfe4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.644229] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f78e523-5397-479d-9014-e119d03c6511 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.678097] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec076e1-3026-41f3-97d8-e95e3135f890 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.685607] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2fd820e-a9fe-42a5-8117-7949b5e38f2c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.698991] env[62585]: DEBUG nova.compute.provider_tree [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b 
{{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.700698] env[62585]: INFO nova.scheduler.client.report [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Deleted allocations for instance 0582ee18-80b2-48d0-9d8d-e82d1b9d60c5 [ 775.043143] env[62585]: DEBUG nova.network.neutron [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.207063] env[62585]: DEBUG nova.scheduler.client.report [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 775.210673] env[62585]: DEBUG oslo_concurrency.lockutils [None req-57e23358-7c1f-4332-ad8a-232d0782f8e1 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "0582ee18-80b2-48d0-9d8d-e82d1b9d60c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.692s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.546699] env[62585]: INFO nova.compute.manager [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] [instance: b1587330-1740-4bfd-a0c3-a25794c3ccd3] Took 1.04 seconds to deallocate network for instance. [ 775.713659] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.334s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.714209] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 775.717136] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 775.719602] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.188s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.221472] env[62585]: DEBUG nova.compute.utils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 776.223629] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 776.223629] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 776.254931] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.317392] env[62585]: DEBUG nova.policy [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc3a99a607fe490fbff954dd2cf89914', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd40ca08c01ef475b82b42c8f7384791a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 776.474159] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c236b3d3-52e7-4eca-a896-811ae2021ca9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.480872] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3bb136b-aa5e-4207-8ab9-2579a7cf8c77 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.512825] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2be97e1-f100-40aa-9d99-5e50748b1f5f {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.520344] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53cf056-9592-4a93-a664-9c1922bba7dc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.535871] env[62585]: DEBUG nova.compute.provider_tree [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.573096] env[62585]: INFO nova.scheduler.client.report [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Deleted allocations for instance b1587330-1740-4bfd-a0c3-a25794c3ccd3 [ 776.697479] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Successfully created port: 1886b56b-9178-4fe5-a5e8-3d1e44673714 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 776.730507] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 776.951253] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "e89b55d3-aa15-4d28-ba80-fe3b45ee289f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.951253] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "e89b55d3-aa15-4d28-ba80-fe3b45ee289f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.039966] env[62585]: DEBUG nova.scheduler.client.report [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 777.081803] env[62585]: 
DEBUG oslo_concurrency.lockutils [None req-e131572d-652b-4e18-80e7-ea49439269ae tempest-ServerRescueTestJSONUnderV235-1848074594 tempest-ServerRescueTestJSONUnderV235-1848074594-project-member] Lock "b1587330-1740-4bfd-a0c3-a25794c3ccd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.282s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.547423] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.828s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.548102] env[62585]: ERROR nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0757c247-2448-4204-a5a6-1e94d33cc383, please check neutron logs for more information. [ 777.548102] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Traceback (most recent call last): [ 777.548102] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 777.548102] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] self.driver.spawn(context, instance, image_meta, [ 777.548102] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 777.548102] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 777.548102] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 777.548102] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] vm_ref = self.build_virtual_machine(instance, [ 777.548102] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 777.548102] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] vif_infos = vmwarevif.get_vif_info(self._session, [ 777.548102] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 777.549139] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] for vif in network_info: [ 777.549139] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 777.549139] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] return self._sync_wrapper(fn, *args, **kwargs) [ 777.549139] env[62585]: ERROR nova.compute.manager [instance: 
ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 777.549139] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] self.wait() [ 777.549139] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 777.549139] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] self[:] = self._gt.wait() [ 777.549139] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 777.549139] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] return self._exit_event.wait() [ 777.549139] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 777.549139] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] current.throw(*self._exc) [ 777.549139] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 777.549139] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] result = function(*args, **kwargs) [ 777.549459] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 777.549459] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] return func(*args, **kwargs) [ 777.549459] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 777.549459] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] raise e [ 777.549459] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 777.549459] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] nwinfo = self.network_api.allocate_for_instance( [ 777.549459] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 777.549459] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] created_port_ids = self._update_ports_for_instance( [ 777.549459] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 777.549459] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] with excutils.save_and_reraise_exception(): [ 777.549459] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 777.549459] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] self.force_reraise() [ 777.549459] env[62585]: ERROR nova.compute.manager [instance: 
ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 777.549970] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] raise self.value [ 777.549970] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 777.549970] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] updated_port = self._update_port( [ 777.549970] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 777.549970] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] _ensure_no_port_binding_failure(port) [ 777.549970] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 777.549970] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] raise exception.PortBindingFailed(port_id=port['id']) [ 777.549970] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] nova.exception.PortBindingFailed: Binding failed for port 0757c247-2448-4204-a5a6-1e94d33cc383, please check neutron logs for more information. [ 777.549970] env[62585]: ERROR nova.compute.manager [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] [ 777.549970] env[62585]: DEBUG nova.compute.utils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Binding failed for port 0757c247-2448-4204-a5a6-1e94d33cc383, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 777.550637] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.371s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.551991] env[62585]: INFO nova.compute.claims [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 777.558018] env[62585]: DEBUG nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Build of instance ad45de09-f60c-4ac5-a4ff-7088d9742d6a was re-scheduled: Binding failed for port 0757c247-2448-4204-a5a6-1e94d33cc383, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 777.558018] env[62585]: DEBUG nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 777.558018] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "refresh_cache-ad45de09-f60c-4ac5-a4ff-7088d9742d6a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.558018] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquired lock "refresh_cache-ad45de09-f60c-4ac5-a4ff-7088d9742d6a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.558226] env[62585]: DEBUG nova.network.neutron [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 777.584538] env[62585]: DEBUG nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 777.606591] env[62585]: DEBUG nova.compute.manager [req-36951d92-d501-4509-8c27-90f893ade3c0 req-011f6691-c017-418b-a6f0-5b88a8f7dbaa service nova] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Received event network-changed-1886b56b-9178-4fe5-a5e8-3d1e44673714 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 777.606591] env[62585]: DEBUG nova.compute.manager [req-36951d92-d501-4509-8c27-90f893ade3c0 req-011f6691-c017-418b-a6f0-5b88a8f7dbaa service nova] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Refreshing instance network info cache due to event network-changed-1886b56b-9178-4fe5-a5e8-3d1e44673714. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 777.606787] env[62585]: DEBUG oslo_concurrency.lockutils [req-36951d92-d501-4509-8c27-90f893ade3c0 req-011f6691-c017-418b-a6f0-5b88a8f7dbaa service nova] Acquiring lock "refresh_cache-3d0c96e4-65b2-46f7-a742-f36cd11ff8ed" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.607070] env[62585]: DEBUG oslo_concurrency.lockutils [req-36951d92-d501-4509-8c27-90f893ade3c0 req-011f6691-c017-418b-a6f0-5b88a8f7dbaa service nova] Acquired lock "refresh_cache-3d0c96e4-65b2-46f7-a742-f36cd11ff8ed" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.607431] env[62585]: DEBUG nova.network.neutron [req-36951d92-d501-4509-8c27-90f893ade3c0 req-011f6691-c017-418b-a6f0-5b88a8f7dbaa service nova] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Refreshing network info cache for port 1886b56b-9178-4fe5-a5e8-3d1e44673714 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 777.739570] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 777.772330] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 777.772572] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 777.772727] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 777.772907] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 777.773058] env[62585]: 
DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 777.773205] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 777.773531] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 777.773907] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 777.774161] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 777.774374] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 777.774587] env[62585]: DEBUG nova.virt.hardware [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 777.775589] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2f09ee-95ba-4faf-b0c6-f98a0e76deff {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.784279] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0572a7e6-bebb-4a20-bc28-b5ffe64d94fc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.800992] env[62585]: ERROR nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1886b56b-9178-4fe5-a5e8-3d1e44673714, please check neutron logs for more information. 
[ 777.800992] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 777.800992] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 777.800992] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 777.800992] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 777.800992] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 777.800992] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 777.800992] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 777.800992] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 777.800992] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 777.800992] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 777.800992] env[62585]: ERROR nova.compute.manager raise self.value [ 777.800992] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 777.800992] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 777.800992] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 777.800992] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 777.801474] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 777.801474] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 777.801474] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1886b56b-9178-4fe5-a5e8-3d1e44673714, please check neutron logs for more information. 
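Each of these tracebacks passes through the same oslo.utils idiom: _update_ports_for_instance catches the failure, does its cleanup inside excutils.save_and_reraise_exception(), and the original PortBindingFailed then surfaces unchanged in _allocate_network_async. A self-contained sketch of that pattern under illustrative names (bind_port, update_ports and the reused port id are not Nova's own code):

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        pass

    def bind_port(port_id):
        # Stand-in for the Neutron port update that fails in the traceback above.
        raise PortBindingFailed("Binding failed for port %s" % port_id)

    def update_ports(port_ids):
        created = []
        for port_id in port_ids:
            try:
                bind_port(port_id)
                created.append(port_id)
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Cleanup runs here; when this block exits, the saved
                    # exception is re-raised automatically, which is why the
                    # traceback shows force_reraise() then "raise self.value".
                    print("rolling back %d already-created port(s)" % len(created))

    try:
        update_ports(["1886b56b-9178-4fe5-a5e8-3d1e44673714"])
    except PortBindingFailed as exc:
        print("surfaced unchanged:", exc)
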
[ 777.801474] env[62585]: ERROR nova.compute.manager [ 777.801474] env[62585]: Traceback (most recent call last): [ 777.801474] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 777.801474] env[62585]: listener.cb(fileno) [ 777.801474] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 777.801474] env[62585]: result = function(*args, **kwargs) [ 777.801474] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 777.801474] env[62585]: return func(*args, **kwargs) [ 777.801474] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 777.801474] env[62585]: raise e [ 777.801474] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 777.801474] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 777.801474] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 777.801474] env[62585]: created_port_ids = self._update_ports_for_instance( [ 777.801474] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 777.801474] env[62585]: with excutils.save_and_reraise_exception(): [ 777.801474] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 777.801474] env[62585]: self.force_reraise() [ 777.801474] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 777.801474] env[62585]: raise self.value [ 777.801474] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 777.801474] env[62585]: updated_port = self._update_port( [ 777.801474] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 777.801474] env[62585]: _ensure_no_port_binding_failure(port) [ 777.801474] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 777.801474] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 777.802177] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 1886b56b-9178-4fe5-a5e8-3d1e44673714, please check neutron logs for more information. [ 777.802177] env[62585]: Removing descriptor: 15 [ 777.802575] env[62585]: ERROR nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1886b56b-9178-4fe5-a5e8-3d1e44673714, please check neutron logs for more information. 
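When following the message's advice to check the Neutron side, it can also help to look at what Neutron recorded on the port itself. A small openstacksdk sketch; the cloud name is an assumption, and the rough CLI equivalent is openstack port show <port-id> -c binding_vif_type -c binding_host_id:

    import openstack

    # Assumes a clouds.yaml entry named "devstack"; adjust for the environment
    # at hand.
    conn = openstack.connect(cloud="devstack")

    port = conn.network.get_port("1886b56b-9178-4fe5-a5e8-3d1e44673714")

    # A vif_type of "binding_failed" is the condition that makes Nova raise
    # PortBindingFailed; binding_host_id shows which host Neutron tried to
    # bind the port to.
    print(port.binding_vif_type, port.binding_host_id)
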
[ 777.802575] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Traceback (most recent call last): [ 777.802575] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 777.802575] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] yield resources [ 777.802575] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 777.802575] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] self.driver.spawn(context, instance, image_meta, [ 777.802575] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 777.802575] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 777.802575] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 777.802575] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] vm_ref = self.build_virtual_machine(instance, [ 777.802575] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 777.802832] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] vif_infos = vmwarevif.get_vif_info(self._session, [ 777.802832] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 777.802832] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] for vif in network_info: [ 777.802832] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 777.802832] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] return self._sync_wrapper(fn, *args, **kwargs) [ 777.802832] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 777.802832] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] self.wait() [ 777.802832] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 777.802832] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] self[:] = self._gt.wait() [ 777.802832] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 777.802832] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] return self._exit_event.wait() [ 777.802832] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 777.802832] env[62585]: ERROR 
nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] result = hub.switch() [ 777.803124] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 777.803124] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] return self.greenlet.switch() [ 777.803124] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 777.803124] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] result = function(*args, **kwargs) [ 777.803124] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 777.803124] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] return func(*args, **kwargs) [ 777.803124] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 777.803124] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] raise e [ 777.803124] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 777.803124] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] nwinfo = self.network_api.allocate_for_instance( [ 777.803124] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 777.803124] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] created_port_ids = self._update_ports_for_instance( [ 777.803124] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 777.803394] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] with excutils.save_and_reraise_exception(): [ 777.803394] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 777.803394] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] self.force_reraise() [ 777.803394] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 777.803394] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] raise self.value [ 777.803394] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 777.803394] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] updated_port = self._update_port( [ 777.803394] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 777.803394] 
env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] _ensure_no_port_binding_failure(port) [ 777.803394] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 777.803394] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] raise exception.PortBindingFailed(port_id=port['id']) [ 777.803394] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] nova.exception.PortBindingFailed: Binding failed for port 1886b56b-9178-4fe5-a5e8-3d1e44673714, please check neutron logs for more information. [ 777.803394] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] [ 777.803670] env[62585]: INFO nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Terminating instance [ 777.804930] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquiring lock "refresh_cache-3d0c96e4-65b2-46f7-a742-f36cd11ff8ed" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.082844] env[62585]: DEBUG nova.network.neutron [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.113817] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.131734] env[62585]: DEBUG nova.network.neutron [req-36951d92-d501-4509-8c27-90f893ade3c0 req-011f6691-c017-418b-a6f0-5b88a8f7dbaa service nova] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.183018] env[62585]: DEBUG nova.network.neutron [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.480640] env[62585]: DEBUG nova.network.neutron [req-36951d92-d501-4509-8c27-90f893ade3c0 req-011f6691-c017-418b-a6f0-5b88a8f7dbaa service nova] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.686135] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Releasing lock "refresh_cache-ad45de09-f60c-4ac5-a4ff-7088d9742d6a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.686391] env[62585]: DEBUG nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 778.686574] env[62585]: DEBUG nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 778.686743] env[62585]: DEBUG nova.network.neutron [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 778.729770] env[62585]: DEBUG nova.network.neutron [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.785103] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0358dcf-8163-469f-b659-4c504e05d219 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.792434] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009c5eae-b652-4947-a3b0-a6c4294f157c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.822314] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc152b3-a361-4963-a39a-f9c29fe60693 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.830140] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dbdc036-1bfb-41a9-9b94-a3c1ccf3aaba {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.843455] env[62585]: DEBUG nova.compute.provider_tree [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 778.985715] env[62585]: DEBUG oslo_concurrency.lockutils [req-36951d92-d501-4509-8c27-90f893ade3c0 req-011f6691-c017-418b-a6f0-5b88a8f7dbaa service nova] Releasing lock "refresh_cache-3d0c96e4-65b2-46f7-a742-f36cd11ff8ed" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.986161] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquired lock "refresh_cache-3d0c96e4-65b2-46f7-a742-f36cd11ff8ed" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.986365] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 779.232450] env[62585]: DEBUG nova.network.neutron [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.346433] env[62585]: DEBUG nova.scheduler.client.report [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 779.506181] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.596659] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.627076] env[62585]: DEBUG nova.compute.manager [req-ec6cae16-7d3a-4a50-9816-2ebe20461d15 req-3b905710-afc3-4594-a1e9-d8908fe1eb21 service nova] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Received event network-vif-deleted-1886b56b-9178-4fe5-a5e8-3d1e44673714 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 779.735841] env[62585]: INFO nova.compute.manager [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: ad45de09-f60c-4ac5-a4ff-7088d9742d6a] Took 1.05 seconds to deallocate network for instance. [ 779.853651] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.303s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.854224] env[62585]: DEBUG nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 779.861230] env[62585]: DEBUG oslo_concurrency.lockutils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.441s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.862633] env[62585]: INFO nova.compute.claims [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 780.099766] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Releasing lock "refresh_cache-3d0c96e4-65b2-46f7-a742-f36cd11ff8ed" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.100235] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 780.100430] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 780.100736] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-818b4ed6-b4c9-46de-8725-bfe3d6d6d65a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.110197] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7165fe54-abb3-49d9-8ac1-925ffa0ebb62 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.131366] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed could not be found. [ 780.131595] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 780.131779] env[62585]: INFO nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Took 0.03 seconds to destroy the instance on the hypervisor. 
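The traceback above bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure, which raises PortBindingFailed once Neutron reports the port binding as failed. As a rough, standalone illustration of that kind of guard (a minimal sketch with simplified names, assuming the Neutron port dict carries binding:vif_type the way the Neutron API normally exposes it; this is not Nova's actual code), the check amounts to:

    # Minimal sketch of a fail-fast guard on a broken port binding.
    # Hypothetical standalone code for illustration, not Nova's implementation.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port whose binding could not be completed with
        # binding:vif_type set to 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port['id'])

    # Example: a port dict shaped like a Neutron API response after a failed bind.
    port = {'id': '1886b56b-9178-4fe5-a5e8-3d1e44673714',
            'binding:vif_type': 'binding_failed'}
    ensure_no_port_binding_failure(port)  # raises PortBindingFailed

Raising early on that condition is what turns a Neutron-side bind failure into the build abort, instance termination and network deallocation recorded in the surrounding entries.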
[ 780.132040] env[62585]: DEBUG oslo.service.loopingcall [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 780.132282] env[62585]: DEBUG nova.compute.manager [-] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 780.132377] env[62585]: DEBUG nova.network.neutron [-] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 780.148841] env[62585]: DEBUG nova.network.neutron [-] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 780.370492] env[62585]: DEBUG nova.compute.utils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 780.375059] env[62585]: DEBUG nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 780.375268] env[62585]: DEBUG nova.network.neutron [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 780.432105] env[62585]: DEBUG nova.policy [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d55ff7a313340dcb5e604d7ac6a20cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a1f5804aeece4e99b4f316398774404b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 780.651020] env[62585]: DEBUG nova.network.neutron [-] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.767723] env[62585]: DEBUG nova.network.neutron [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Successfully created port: 691fd14e-e0ef-4a6f-859e-92f01fbd7650 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 780.776872] 
env[62585]: INFO nova.scheduler.client.report [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Deleted allocations for instance ad45de09-f60c-4ac5-a4ff-7088d9742d6a [ 780.879930] env[62585]: DEBUG nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 781.142018] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb39f373-87d5-421d-8b1f-641fb0e9ca70 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.150451] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b972b414-fe31-4aab-b7cb-e6e3f2df1023 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.155131] env[62585]: INFO nova.compute.manager [-] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Took 1.02 seconds to deallocate network for instance. [ 781.158332] env[62585]: DEBUG nova.compute.claims [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 781.159113] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.186942] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71305f4c-51c3-467f-a887-5dd7a4845c34 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.198917] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e59e8b1-1c71-4f1c-9121-f3cde84d3634 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.225358] env[62585]: DEBUG nova.compute.provider_tree [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.286417] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7dad5b8f-e651-4d52-8d29-ae77ab861845 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "ad45de09-f60c-4ac5-a4ff-7088d9742d6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.158s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.618134] env[62585]: DEBUG 
nova.compute.manager [req-ac3dd529-f1a9-4628-8d4c-72f129a9aa3c req-66b924c6-abf4-4ee2-afcc-ea364067e338 service nova] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Received event network-changed-691fd14e-e0ef-4a6f-859e-92f01fbd7650 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 781.618307] env[62585]: DEBUG nova.compute.manager [req-ac3dd529-f1a9-4628-8d4c-72f129a9aa3c req-66b924c6-abf4-4ee2-afcc-ea364067e338 service nova] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Refreshing instance network info cache due to event network-changed-691fd14e-e0ef-4a6f-859e-92f01fbd7650. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 781.618519] env[62585]: DEBUG oslo_concurrency.lockutils [req-ac3dd529-f1a9-4628-8d4c-72f129a9aa3c req-66b924c6-abf4-4ee2-afcc-ea364067e338 service nova] Acquiring lock "refresh_cache-66af981d-2fa4-4ef4-ac39-3f8f78c543af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.619065] env[62585]: DEBUG oslo_concurrency.lockutils [req-ac3dd529-f1a9-4628-8d4c-72f129a9aa3c req-66b924c6-abf4-4ee2-afcc-ea364067e338 service nova] Acquired lock "refresh_cache-66af981d-2fa4-4ef4-ac39-3f8f78c543af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.619065] env[62585]: DEBUG nova.network.neutron [req-ac3dd529-f1a9-4628-8d4c-72f129a9aa3c req-66b924c6-abf4-4ee2-afcc-ea364067e338 service nova] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Refreshing network info cache for port 691fd14e-e0ef-4a6f-859e-92f01fbd7650 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 781.732933] env[62585]: DEBUG nova.scheduler.client.report [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 781.789033] env[62585]: DEBUG nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 781.837478] env[62585]: ERROR nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 691fd14e-e0ef-4a6f-859e-92f01fbd7650, please check neutron logs for more information. 
[ 781.837478] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 781.837478] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 781.837478] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 781.837478] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 781.837478] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 781.837478] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 781.837478] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 781.837478] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 781.837478] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 781.837478] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 781.837478] env[62585]: ERROR nova.compute.manager raise self.value [ 781.837478] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 781.837478] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 781.837478] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 781.837478] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 781.837961] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 781.837961] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 781.837961] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 691fd14e-e0ef-4a6f-859e-92f01fbd7650, please check neutron logs for more information. 
[ 781.837961] env[62585]: ERROR nova.compute.manager [ 781.838797] env[62585]: Traceback (most recent call last): [ 781.838797] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 781.838797] env[62585]: listener.cb(fileno) [ 781.838797] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 781.838797] env[62585]: result = function(*args, **kwargs) [ 781.838797] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 781.838797] env[62585]: return func(*args, **kwargs) [ 781.838797] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 781.838797] env[62585]: raise e [ 781.838797] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 781.838797] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 781.838797] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 781.838797] env[62585]: created_port_ids = self._update_ports_for_instance( [ 781.838797] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 781.838797] env[62585]: with excutils.save_and_reraise_exception(): [ 781.838797] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 781.838797] env[62585]: self.force_reraise() [ 781.838797] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 781.838797] env[62585]: raise self.value [ 781.838797] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 781.838797] env[62585]: updated_port = self._update_port( [ 781.838797] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 781.838797] env[62585]: _ensure_no_port_binding_failure(port) [ 781.838797] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 781.838797] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 781.838797] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 691fd14e-e0ef-4a6f-859e-92f01fbd7650, please check neutron logs for more information. [ 781.838797] env[62585]: Removing descriptor: 17 [ 781.890356] env[62585]: DEBUG nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 781.918597] env[62585]: DEBUG nova.virt.hardware [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 781.918597] env[62585]: DEBUG nova.virt.hardware [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 781.918597] env[62585]: DEBUG nova.virt.hardware [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 781.918793] env[62585]: DEBUG nova.virt.hardware [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 781.918895] env[62585]: DEBUG nova.virt.hardware [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 781.919112] env[62585]: DEBUG nova.virt.hardware [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 781.919364] env[62585]: DEBUG nova.virt.hardware [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 781.919625] env[62585]: DEBUG nova.virt.hardware [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 781.919770] env[62585]: DEBUG nova.virt.hardware [None 
req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 781.919970] env[62585]: DEBUG nova.virt.hardware [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 781.920199] env[62585]: DEBUG nova.virt.hardware [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 781.921365] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee90157-68a5-485f-89fe-f2953a740e32 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.929191] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df923cbf-1d3e-4021-b383-1c9413c630d8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.943183] env[62585]: ERROR nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 691fd14e-e0ef-4a6f-859e-92f01fbd7650, please check neutron logs for more information. 
[ 781.943183] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Traceback (most recent call last): [ 781.943183] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 781.943183] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] yield resources [ 781.943183] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 781.943183] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] self.driver.spawn(context, instance, image_meta, [ 781.943183] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 781.943183] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] self._vmops.spawn(context, instance, image_meta, injected_files, [ 781.943183] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 781.943183] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] vm_ref = self.build_virtual_machine(instance, [ 781.943183] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 781.943518] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] vif_infos = vmwarevif.get_vif_info(self._session, [ 781.943518] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 781.943518] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] for vif in network_info: [ 781.943518] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 781.943518] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] return self._sync_wrapper(fn, *args, **kwargs) [ 781.943518] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 781.943518] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] self.wait() [ 781.943518] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 781.943518] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] self[:] = self._gt.wait() [ 781.943518] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 781.943518] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] return self._exit_event.wait() [ 781.943518] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 781.943518] env[62585]: ERROR 
nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] current.throw(*self._exc) [ 781.943875] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 781.943875] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] result = function(*args, **kwargs) [ 781.943875] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 781.943875] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] return func(*args, **kwargs) [ 781.943875] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 781.943875] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] raise e [ 781.943875] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 781.943875] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] nwinfo = self.network_api.allocate_for_instance( [ 781.943875] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 781.943875] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] created_port_ids = self._update_ports_for_instance( [ 781.943875] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 781.943875] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] with excutils.save_and_reraise_exception(): [ 781.943875] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 781.944215] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] self.force_reraise() [ 781.944215] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 781.944215] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] raise self.value [ 781.944215] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 781.944215] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] updated_port = self._update_port( [ 781.944215] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 781.944215] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] _ensure_no_port_binding_failure(port) [ 781.944215] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
781.944215] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] raise exception.PortBindingFailed(port_id=port['id']) [ 781.944215] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] nova.exception.PortBindingFailed: Binding failed for port 691fd14e-e0ef-4a6f-859e-92f01fbd7650, please check neutron logs for more information. [ 781.944215] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] [ 781.944215] env[62585]: INFO nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Terminating instance [ 781.946949] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Acquiring lock "refresh_cache-66af981d-2fa4-4ef4-ac39-3f8f78c543af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.142335] env[62585]: DEBUG nova.network.neutron [req-ac3dd529-f1a9-4628-8d4c-72f129a9aa3c req-66b924c6-abf4-4ee2-afcc-ea364067e338 service nova] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 782.238168] env[62585]: DEBUG nova.network.neutron [req-ac3dd529-f1a9-4628-8d4c-72f129a9aa3c req-66b924c6-abf4-4ee2-afcc-ea364067e338 service nova] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.239803] env[62585]: DEBUG oslo_concurrency.lockutils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.240633] env[62585]: DEBUG nova.compute.manager [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 782.244052] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.042s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.246046] env[62585]: INFO nova.compute.claims [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 782.317534] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.746364] env[62585]: DEBUG oslo_concurrency.lockutils [req-ac3dd529-f1a9-4628-8d4c-72f129a9aa3c req-66b924c6-abf4-4ee2-afcc-ea364067e338 service nova] Releasing lock "refresh_cache-66af981d-2fa4-4ef4-ac39-3f8f78c543af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.747901] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Acquired lock "refresh_cache-66af981d-2fa4-4ef4-ac39-3f8f78c543af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.747901] env[62585]: DEBUG nova.network.neutron [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 782.752027] env[62585]: DEBUG nova.compute.utils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 782.754413] env[62585]: DEBUG nova.compute.manager [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 782.754559] env[62585]: DEBUG nova.network.neutron [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 782.797024] env[62585]: DEBUG nova.policy [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01befe1db3684d60943c74da2c2c9fdc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f00751679b29472e9ab92c9e48a99925', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 783.044980] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "8763a058-b453-4f03-9532-7d7e65efdfb2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.044980] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "8763a058-b453-4f03-9532-7d7e65efdfb2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.181111] env[62585]: DEBUG nova.network.neutron [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Successfully created port: c7aef8b8-56d1-4819-be4b-9c46bf08752f {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 783.255354] env[62585]: DEBUG nova.compute.manager [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 783.266482] env[62585]: DEBUG nova.network.neutron [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.376211] env[62585]: DEBUG nova.network.neutron [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.501234] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-707d6ec0-f717-4e05-af5a-b7c74917532c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.509331] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e331ac-42aa-4ebe-8364-ff3c7fa9b300 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.548625] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a4c65e-08ef-4508-ad92-c5b2ea85e870 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.556377] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc42ad55-34a7-46ae-9c62-d417497ae44a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.569935] env[62585]: DEBUG nova.compute.provider_tree [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.644485] env[62585]: DEBUG nova.compute.manager [req-2ca109df-50de-4de3-b91f-6a98ebc5d70d req-c5dbda4f-3b7d-4643-9858-f7767d28fc27 service nova] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Received event network-vif-deleted-691fd14e-e0ef-4a6f-859e-92f01fbd7650 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 783.875669] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "6057e13b-71df-458d-b6ed-c139a8c57836" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.875933] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "6057e13b-71df-458d-b6ed-c139a8c57836" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.881937] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Releasing lock "refresh_cache-66af981d-2fa4-4ef4-ac39-3f8f78c543af" {{(pid=62585) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.882374] env[62585]: DEBUG nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 783.882510] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 783.885018] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b8b6ea0-768a-4fe8-9617-28b7ab986d87 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.892203] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0dedc3b-8808-46d6-90e2-6cbe35ec3daa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.913667] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 66af981d-2fa4-4ef4-ac39-3f8f78c543af could not be found. [ 783.913894] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 783.914093] env[62585]: INFO nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Took 0.03 seconds to destroy the instance on the hypervisor. [ 783.914342] env[62585]: DEBUG oslo.service.loopingcall [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 783.914565] env[62585]: DEBUG nova.compute.manager [-] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 783.914660] env[62585]: DEBUG nova.network.neutron [-] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 783.929985] env[62585]: DEBUG nova.network.neutron [-] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.072999] env[62585]: DEBUG nova.scheduler.client.report [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 784.124023] env[62585]: ERROR nova.compute.manager [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c7aef8b8-56d1-4819-be4b-9c46bf08752f, please check neutron logs for more information. [ 784.124023] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 784.124023] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 784.124023] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 784.124023] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 784.124023] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 784.124023] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 784.124023] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 784.124023] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.124023] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 784.124023] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.124023] env[62585]: ERROR nova.compute.manager raise self.value [ 784.124023] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 784.124023] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 784.124023] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.124023] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 784.124454] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 784.124454] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 784.124454] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c7aef8b8-56d1-4819-be4b-9c46bf08752f, please check neutron logs for more information. 
[ 784.124454] env[62585]: ERROR nova.compute.manager [ 784.124454] env[62585]: Traceback (most recent call last): [ 784.124454] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 784.124454] env[62585]: listener.cb(fileno) [ 784.124454] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 784.124454] env[62585]: result = function(*args, **kwargs) [ 784.124454] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 784.124454] env[62585]: return func(*args, **kwargs) [ 784.124454] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 784.124454] env[62585]: raise e [ 784.124454] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 784.124454] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 784.124454] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 784.124454] env[62585]: created_port_ids = self._update_ports_for_instance( [ 784.124454] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 784.124454] env[62585]: with excutils.save_and_reraise_exception(): [ 784.124454] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.124454] env[62585]: self.force_reraise() [ 784.124454] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.124454] env[62585]: raise self.value [ 784.124454] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 784.124454] env[62585]: updated_port = self._update_port( [ 784.124454] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.124454] env[62585]: _ensure_no_port_binding_failure(port) [ 784.124454] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 784.124454] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 784.125104] env[62585]: nova.exception.PortBindingFailed: Binding failed for port c7aef8b8-56d1-4819-be4b-9c46bf08752f, please check neutron logs for more information. [ 784.125104] env[62585]: Removing descriptor: 17 [ 784.269654] env[62585]: DEBUG nova.compute.manager [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 784.298193] env[62585]: DEBUG nova.virt.hardware [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 784.298484] env[62585]: DEBUG nova.virt.hardware [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 784.298665] env[62585]: DEBUG nova.virt.hardware [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 784.298909] env[62585]: DEBUG nova.virt.hardware [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 784.299128] env[62585]: DEBUG nova.virt.hardware [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 784.299330] env[62585]: DEBUG nova.virt.hardware [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 784.299589] env[62585]: DEBUG nova.virt.hardware [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 784.299773] env[62585]: DEBUG nova.virt.hardware [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 784.299965] env[62585]: DEBUG nova.virt.hardware [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] 
Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 784.300148] env[62585]: DEBUG nova.virt.hardware [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 784.300319] env[62585]: DEBUG nova.virt.hardware [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 784.301176] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8e38bc-f9bd-4004-b6dc-0e12cc11cf70 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.309255] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecef5dd9-00ac-427d-8e0a-f53ed3d7d720 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.323162] env[62585]: ERROR nova.compute.manager [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c7aef8b8-56d1-4819-be4b-9c46bf08752f, please check neutron logs for more information. [ 784.323162] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Traceback (most recent call last): [ 784.323162] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 784.323162] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] yield resources [ 784.323162] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 784.323162] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] self.driver.spawn(context, instance, image_meta, [ 784.323162] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 784.323162] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 784.323162] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 784.323162] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] vm_ref = self.build_virtual_machine(instance, [ 784.323162] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 784.323498] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] vif_infos = vmwarevif.get_vif_info(self._session, [ 784.323498] env[62585]: ERROR 
nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 784.323498] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] for vif in network_info: [ 784.323498] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 784.323498] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] return self._sync_wrapper(fn, *args, **kwargs) [ 784.323498] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 784.323498] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] self.wait() [ 784.323498] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 784.323498] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] self[:] = self._gt.wait() [ 784.323498] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 784.323498] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] return self._exit_event.wait() [ 784.323498] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 784.323498] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] current.throw(*self._exc) [ 784.323799] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 784.323799] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] result = function(*args, **kwargs) [ 784.323799] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 784.323799] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] return func(*args, **kwargs) [ 784.323799] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 784.323799] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] raise e [ 784.323799] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 784.323799] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] nwinfo = self.network_api.allocate_for_instance( [ 784.323799] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 784.323799] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] created_port_ids = self._update_ports_for_instance( [ 784.323799] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] 
File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 784.323799] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] with excutils.save_and_reraise_exception(): [ 784.323799] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.324131] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] self.force_reraise() [ 784.324131] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.324131] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] raise self.value [ 784.324131] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 784.324131] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] updated_port = self._update_port( [ 784.324131] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.324131] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] _ensure_no_port_binding_failure(port) [ 784.324131] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 784.324131] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] raise exception.PortBindingFailed(port_id=port['id']) [ 784.324131] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] nova.exception.PortBindingFailed: Binding failed for port c7aef8b8-56d1-4819-be4b-9c46bf08752f, please check neutron logs for more information. 
[ 784.324131] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] [ 784.324131] env[62585]: INFO nova.compute.manager [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Terminating instance [ 784.325510] env[62585]: DEBUG oslo_concurrency.lockutils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "refresh_cache-c6f0ee10-c5cc-41ad-8b81-f7644921845b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.325669] env[62585]: DEBUG oslo_concurrency.lockutils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired lock "refresh_cache-c6f0ee10-c5cc-41ad-8b81-f7644921845b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.325842] env[62585]: DEBUG nova.network.neutron [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 784.433290] env[62585]: DEBUG nova.network.neutron [-] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.578831] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.579422] env[62585]: DEBUG nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 784.582245] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.605s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.583537] env[62585]: INFO nova.compute.claims [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 784.843523] env[62585]: DEBUG nova.network.neutron [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.935336] env[62585]: DEBUG nova.network.neutron [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.936546] env[62585]: INFO nova.compute.manager [-] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Took 1.02 seconds to deallocate network for instance. [ 784.938651] env[62585]: DEBUG nova.compute.claims [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 784.938847] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.088311] env[62585]: DEBUG nova.compute.utils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 785.091943] env[62585]: DEBUG nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 785.092123] env[62585]: DEBUG nova.network.neutron [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 785.130584] env[62585]: DEBUG nova.policy [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16834068ff284380895a4c0b7b14a34c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11e04f6bf4914b4a9f9fca59d0ffd51f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 785.408489] env[62585]: DEBUG nova.network.neutron [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Successfully created port: 2d6dc99f-ef27-43fc-89e4-f59b4d1cff9b {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.440301] env[62585]: DEBUG oslo_concurrency.lockutils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Releasing lock "refresh_cache-c6f0ee10-c5cc-41ad-8b81-f7644921845b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.440752] env[62585]: DEBUG nova.compute.manager [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 785.441048] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 785.441414] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d93898d-a962-44f8-bf7d-b5122bd7b9b2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.451079] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61cde7e-1557-4b6c-8b65-a457c8f56534 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.473673] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c6f0ee10-c5cc-41ad-8b81-f7644921845b could not be found. 
[ 785.473913] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 785.474147] env[62585]: INFO nova.compute.manager [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Took 0.03 seconds to destroy the instance on the hypervisor. [ 785.474396] env[62585]: DEBUG oslo.service.loopingcall [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 785.474616] env[62585]: DEBUG nova.compute.manager [-] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 785.474707] env[62585]: DEBUG nova.network.neutron [-] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 785.487790] env[62585]: DEBUG nova.network.neutron [-] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.593319] env[62585]: DEBUG nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 785.680007] env[62585]: DEBUG nova.compute.manager [req-66b5b412-d32a-464b-8f6c-64e07915d0c1 req-ce5b236a-de75-4a20-bdb2-231c4c15bf92 service nova] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Received event network-changed-c7aef8b8-56d1-4819-be4b-9c46bf08752f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 785.680332] env[62585]: DEBUG nova.compute.manager [req-66b5b412-d32a-464b-8f6c-64e07915d0c1 req-ce5b236a-de75-4a20-bdb2-231c4c15bf92 service nova] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Refreshing instance network info cache due to event network-changed-c7aef8b8-56d1-4819-be4b-9c46bf08752f. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 785.680766] env[62585]: DEBUG oslo_concurrency.lockutils [req-66b5b412-d32a-464b-8f6c-64e07915d0c1 req-ce5b236a-de75-4a20-bdb2-231c4c15bf92 service nova] Acquiring lock "refresh_cache-c6f0ee10-c5cc-41ad-8b81-f7644921845b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.680858] env[62585]: DEBUG oslo_concurrency.lockutils [req-66b5b412-d32a-464b-8f6c-64e07915d0c1 req-ce5b236a-de75-4a20-bdb2-231c4c15bf92 service nova] Acquired lock "refresh_cache-c6f0ee10-c5cc-41ad-8b81-f7644921845b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.681115] env[62585]: DEBUG nova.network.neutron [req-66b5b412-d32a-464b-8f6c-64e07915d0c1 req-ce5b236a-de75-4a20-bdb2-231c4c15bf92 service nova] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Refreshing network info cache for port c7aef8b8-56d1-4819-be4b-9c46bf08752f {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 785.841815] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673942e0-5c45-4a27-8f89-2877c3ede283 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.850204] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a628ba7-ab56-46cf-8de9-8e16e7a347c0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.879899] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839e6ef7-314f-4656-b2ea-9aaabfa23191 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.887907] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8706576-e785-4b23-b41d-e3ceffcda776 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.902064] env[62585]: DEBUG nova.compute.provider_tree [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.990599] env[62585]: DEBUG nova.network.neutron [-] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.204816] env[62585]: DEBUG nova.network.neutron [req-66b5b412-d32a-464b-8f6c-64e07915d0c1 req-ce5b236a-de75-4a20-bdb2-231c4c15bf92 service nova] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.285361] env[62585]: DEBUG nova.network.neutron [req-66b5b412-d32a-464b-8f6c-64e07915d0c1 req-ce5b236a-de75-4a20-bdb2-231c4c15bf92 service nova] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.405865] env[62585]: DEBUG nova.scheduler.client.report [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 786.496881] env[62585]: INFO nova.compute.manager [-] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Took 1.02 seconds to deallocate network for instance. [ 786.500896] env[62585]: ERROR nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2d6dc99f-ef27-43fc-89e4-f59b4d1cff9b, please check neutron logs for more information. [ 786.500896] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 786.500896] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 786.500896] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 786.500896] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 786.500896] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 786.500896] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 786.500896] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 786.500896] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.500896] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 786.500896] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.500896] env[62585]: ERROR nova.compute.manager raise self.value [ 786.500896] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 786.500896] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 786.500896] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.500896] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 786.501369] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 786.501369] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 786.501369] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2d6dc99f-ef27-43fc-89e4-f59b4d1cff9b, please check neutron logs for more information. [ 786.501369] env[62585]: ERROR nova.compute.manager [ 786.501369] env[62585]: Traceback (most recent call last): [ 786.501369] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 786.501369] env[62585]: listener.cb(fileno) [ 786.501369] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 786.501369] env[62585]: result = function(*args, **kwargs) [ 786.501369] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 786.501369] env[62585]: return func(*args, **kwargs) [ 786.501369] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 786.501369] env[62585]: raise e [ 786.501369] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 786.501369] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 786.501369] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 786.501369] env[62585]: created_port_ids = self._update_ports_for_instance( [ 786.501369] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 786.501369] env[62585]: with excutils.save_and_reraise_exception(): [ 786.501369] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.501369] env[62585]: self.force_reraise() [ 786.501369] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.501369] env[62585]: raise self.value [ 786.501369] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 786.501369] env[62585]: updated_port = self._update_port( [ 786.501369] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.501369] env[62585]: _ensure_no_port_binding_failure(port) [ 786.501369] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 786.501369] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 786.502087] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 2d6dc99f-ef27-43fc-89e4-f59b4d1cff9b, please check neutron logs for more information. 
[ 786.502087] env[62585]: Removing descriptor: 17 [ 786.502087] env[62585]: DEBUG nova.compute.claims [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 786.502087] env[62585]: DEBUG oslo_concurrency.lockutils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.606127] env[62585]: DEBUG nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 786.630781] env[62585]: DEBUG nova.virt.hardware [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 786.631043] env[62585]: DEBUG nova.virt.hardware [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 786.631207] env[62585]: DEBUG nova.virt.hardware [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 786.631389] env[62585]: DEBUG nova.virt.hardware [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 786.631529] env[62585]: DEBUG nova.virt.hardware [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 786.631669] env[62585]: DEBUG nova.virt.hardware [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 786.631873] env[62585]: DEBUG nova.virt.hardware [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 786.632041] env[62585]: DEBUG nova.virt.hardware [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 786.632210] env[62585]: DEBUG nova.virt.hardware [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 786.632369] env[62585]: DEBUG nova.virt.hardware [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 786.632537] env[62585]: DEBUG nova.virt.hardware [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 786.633409] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202f1591-3909-4c71-982e-3ce74e5f20a3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.641655] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb64437e-446c-4907-8af8-1a87e1c13fd8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.654862] env[62585]: ERROR nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2d6dc99f-ef27-43fc-89e4-f59b4d1cff9b, please check neutron logs for more information. 
[ 786.654862] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Traceback (most recent call last): [ 786.654862] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 786.654862] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] yield resources [ 786.654862] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 786.654862] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] self.driver.spawn(context, instance, image_meta, [ 786.654862] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 786.654862] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 786.654862] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 786.654862] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] vm_ref = self.build_virtual_machine(instance, [ 786.654862] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 786.655240] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] vif_infos = vmwarevif.get_vif_info(self._session, [ 786.655240] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 786.655240] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] for vif in network_info: [ 786.655240] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 786.655240] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] return self._sync_wrapper(fn, *args, **kwargs) [ 786.655240] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 786.655240] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] self.wait() [ 786.655240] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 786.655240] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] self[:] = self._gt.wait() [ 786.655240] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 786.655240] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] return self._exit_event.wait() [ 786.655240] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 786.655240] env[62585]: ERROR 
nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] current.throw(*self._exc) [ 786.655582] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 786.655582] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] result = function(*args, **kwargs) [ 786.655582] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 786.655582] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] return func(*args, **kwargs) [ 786.655582] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 786.655582] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] raise e [ 786.655582] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 786.655582] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] nwinfo = self.network_api.allocate_for_instance( [ 786.655582] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 786.655582] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] created_port_ids = self._update_ports_for_instance( [ 786.655582] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 786.655582] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] with excutils.save_and_reraise_exception(): [ 786.655582] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.655914] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] self.force_reraise() [ 786.655914] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.655914] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] raise self.value [ 786.655914] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 786.655914] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] updated_port = self._update_port( [ 786.655914] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.655914] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] _ensure_no_port_binding_failure(port) [ 786.655914] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
786.655914] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] raise exception.PortBindingFailed(port_id=port['id']) [ 786.655914] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] nova.exception.PortBindingFailed: Binding failed for port 2d6dc99f-ef27-43fc-89e4-f59b4d1cff9b, please check neutron logs for more information. [ 786.655914] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] [ 786.655914] env[62585]: INFO nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Terminating instance [ 786.657123] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Acquiring lock "refresh_cache-5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.657283] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Acquired lock "refresh_cache-5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.657443] env[62585]: DEBUG nova.network.neutron [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 786.788368] env[62585]: DEBUG oslo_concurrency.lockutils [req-66b5b412-d32a-464b-8f6c-64e07915d0c1 req-ce5b236a-de75-4a20-bdb2-231c4c15bf92 service nova] Releasing lock "refresh_cache-c6f0ee10-c5cc-41ad-8b81-f7644921845b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.788630] env[62585]: DEBUG nova.compute.manager [req-66b5b412-d32a-464b-8f6c-64e07915d0c1 req-ce5b236a-de75-4a20-bdb2-231c4c15bf92 service nova] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Received event network-vif-deleted-c7aef8b8-56d1-4819-be4b-9c46bf08752f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 786.910333] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.328s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.910853] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 786.913599] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.912s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.177048] env[62585]: DEBUG nova.network.neutron [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.258900] env[62585]: DEBUG nova.network.neutron [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.418789] env[62585]: DEBUG nova.compute.utils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 787.423432] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 787.423599] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 787.474529] env[62585]: DEBUG nova.policy [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8fd0e110bfc94784b4fe881fb27a48e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10320418f69d4f0e88a3adf2a8245237', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 787.619966] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e314e4-9b97-4a5d-8840-5730c7ba79ee {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.627725] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4ef5ab-2442-4769-840b-408b948e256b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.666061] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c028555-a2ee-4c32-9261-6eeccf887938 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.676627] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5594690-66a7-4097-be23-3714d50b9dc2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.694881] env[62585]: DEBUG nova.compute.provider_tree [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.705409] env[62585]: DEBUG nova.compute.manager [req-b2f1530c-2f9d-46da-82bd-cca01f364413 req-157f3922-7c03-46dd-a339-a8d1f1c6c8ff service nova] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Received event network-changed-2d6dc99f-ef27-43fc-89e4-f59b4d1cff9b {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 787.705601] env[62585]: DEBUG nova.compute.manager [req-b2f1530c-2f9d-46da-82bd-cca01f364413 req-157f3922-7c03-46dd-a339-a8d1f1c6c8ff service nova] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Refreshing instance network info cache due to event network-changed-2d6dc99f-ef27-43fc-89e4-f59b4d1cff9b. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 787.705799] env[62585]: DEBUG oslo_concurrency.lockutils [req-b2f1530c-2f9d-46da-82bd-cca01f364413 req-157f3922-7c03-46dd-a339-a8d1f1c6c8ff service nova] Acquiring lock "refresh_cache-5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.762232] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Releasing lock "refresh_cache-5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.762491] env[62585]: DEBUG nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 787.762698] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 787.763693] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Successfully created port: cedac717-629f-4a55-902e-4a850482fb32 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 787.765555] env[62585]: DEBUG oslo_concurrency.lockutils [req-b2f1530c-2f9d-46da-82bd-cca01f364413 req-157f3922-7c03-46dd-a339-a8d1f1c6c8ff service nova] Acquired lock "refresh_cache-5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.765737] env[62585]: DEBUG nova.network.neutron [req-b2f1530c-2f9d-46da-82bd-cca01f364413 req-157f3922-7c03-46dd-a339-a8d1f1c6c8ff service nova] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Refreshing network info cache for port 2d6dc99f-ef27-43fc-89e4-f59b4d1cff9b {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 787.766758] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9791c854-1771-45ea-9fca-cb7e70908dba {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.777443] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2c7d31-c2d9-4f2f-9b2b-e2eadfed5e9b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.798909] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3 could not be found. 
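Note on the recurring PortBindingFailed tracebacks in this log: they all funnel through the same guard in nova/network/neutron.py (_ensure_no_port_binding_failure, line 294 in this run), which raises once Neutron has marked a port's binding as failed, and that exception is what triggers the "Terminating instance" / "Deallocating network" sequences recorded here. The snippet below is a minimal, self-contained Python sketch of that pattern, reconstructed from the tracebacks only; the 'binding_failed' sentinel for binding:vif_type and the simplified exception class are illustrative assumptions, not copies of the Nova source.

# Illustrative sketch only -- not the authoritative Nova implementation.
class PortBindingFailed(Exception):
    """Simplified stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)

def ensure_no_port_binding_failure(port):
    # Assumption: Neutron reports a failed binding by setting the port's
    # binding:vif_type attribute to 'binding_failed'; the check below turns
    # that into the exception seen in the tracebacks above.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

if __name__ == '__main__':
    # A port dict shaped like the ones referenced in these log entries.
    failed_port = {
        'id': '2d6dc99f-ef27-43fc-89e4-f59b4d1cff9b',
        'binding:vif_type': 'binding_failed',
    }
    try:
        ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)
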
[ 787.799148] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 787.799331] env[62585]: INFO nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 787.799564] env[62585]: DEBUG oslo.service.loopingcall [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 787.799774] env[62585]: DEBUG nova.compute.manager [-] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 787.799868] env[62585]: DEBUG nova.network.neutron [-] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 787.813781] env[62585]: DEBUG nova.network.neutron [-] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.924641] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 788.200783] env[62585]: DEBUG nova.scheduler.client.report [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 788.286143] env[62585]: DEBUG nova.network.neutron [req-b2f1530c-2f9d-46da-82bd-cca01f364413 req-157f3922-7c03-46dd-a339-a8d1f1c6c8ff service nova] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.316109] env[62585]: DEBUG nova.network.neutron [-] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.362969] env[62585]: DEBUG nova.network.neutron [req-b2f1530c-2f9d-46da-82bd-cca01f364413 req-157f3922-7c03-46dd-a339-a8d1f1c6c8ff service nova] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.627080] env[62585]: ERROR nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cedac717-629f-4a55-902e-4a850482fb32, please check neutron logs for more information. [ 788.627080] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 788.627080] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 788.627080] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 788.627080] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 788.627080] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 788.627080] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 788.627080] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 788.627080] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 788.627080] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 788.627080] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 788.627080] env[62585]: ERROR nova.compute.manager raise self.value [ 788.627080] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 788.627080] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 788.627080] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 788.627080] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 788.627466] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 788.627466] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 788.627466] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cedac717-629f-4a55-902e-4a850482fb32, please check neutron logs for more information. 
[ 788.627466] env[62585]: ERROR nova.compute.manager [ 788.627466] env[62585]: Traceback (most recent call last): [ 788.627466] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 788.627466] env[62585]: listener.cb(fileno) [ 788.627466] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 788.627466] env[62585]: result = function(*args, **kwargs) [ 788.627466] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 788.627466] env[62585]: return func(*args, **kwargs) [ 788.627466] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 788.627466] env[62585]: raise e [ 788.627466] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 788.627466] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 788.627466] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 788.627466] env[62585]: created_port_ids = self._update_ports_for_instance( [ 788.627466] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 788.627466] env[62585]: with excutils.save_and_reraise_exception(): [ 788.627466] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 788.627466] env[62585]: self.force_reraise() [ 788.627466] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 788.627466] env[62585]: raise self.value [ 788.627466] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 788.627466] env[62585]: updated_port = self._update_port( [ 788.627466] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 788.627466] env[62585]: _ensure_no_port_binding_failure(port) [ 788.627466] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 788.627466] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 788.628163] env[62585]: nova.exception.PortBindingFailed: Binding failed for port cedac717-629f-4a55-902e-4a850482fb32, please check neutron logs for more information. [ 788.628163] env[62585]: Removing descriptor: 17 [ 788.706632] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.792s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.706632] env[62585]: ERROR nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 895bbcc2-1399-4c22-84c8-92c9e1795257, please check neutron logs for more information. 
[ 788.706632] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] Traceback (most recent call last): [ 788.706632] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 788.706632] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] self.driver.spawn(context, instance, image_meta, [ 788.706632] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 788.706632] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 788.706632] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 788.706632] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] vm_ref = self.build_virtual_machine(instance, [ 788.706948] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 788.706948] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 788.706948] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 788.706948] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] for vif in network_info: [ 788.706948] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 788.706948] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] return self._sync_wrapper(fn, *args, **kwargs) [ 788.706948] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 788.706948] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] self.wait() [ 788.706948] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 788.706948] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] self[:] = self._gt.wait() [ 788.706948] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 788.706948] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] return self._exit_event.wait() [ 788.706948] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 788.707306] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] result = hub.switch() [ 788.707306] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
788.707306] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] return self.greenlet.switch() [ 788.707306] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 788.707306] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] result = function(*args, **kwargs) [ 788.707306] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 788.707306] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] return func(*args, **kwargs) [ 788.707306] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 788.707306] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] raise e [ 788.707306] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 788.707306] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] nwinfo = self.network_api.allocate_for_instance( [ 788.707306] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 788.707306] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] created_port_ids = self._update_ports_for_instance( [ 788.707660] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 788.707660] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] with excutils.save_and_reraise_exception(): [ 788.707660] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 788.707660] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] self.force_reraise() [ 788.707660] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 788.707660] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] raise self.value [ 788.707660] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 788.707660] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] updated_port = self._update_port( [ 788.707660] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 788.707660] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] _ensure_no_port_binding_failure(port) [ 788.707660] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 788.707660] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] raise exception.PortBindingFailed(port_id=port['id']) [ 788.707997] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] nova.exception.PortBindingFailed: Binding failed for port 895bbcc2-1399-4c22-84c8-92c9e1795257, please check neutron logs for more information. [ 788.707997] env[62585]: ERROR nova.compute.manager [instance: dd57237d-875e-453a-b830-749776ce10b4] [ 788.707997] env[62585]: DEBUG nova.compute.utils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Binding failed for port 895bbcc2-1399-4c22-84c8-92c9e1795257, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 788.708555] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.113s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.711450] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Build of instance dd57237d-875e-453a-b830-749776ce10b4 was re-scheduled: Binding failed for port 895bbcc2-1399-4c22-84c8-92c9e1795257, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 788.711871] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 788.712113] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquiring lock "refresh_cache-dd57237d-875e-453a-b830-749776ce10b4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.712262] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquired lock "refresh_cache-dd57237d-875e-453a-b830-749776ce10b4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.712418] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 788.818682] env[62585]: INFO nova.compute.manager [-] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Took 1.02 seconds to deallocate network for instance. [ 788.820997] env[62585]: DEBUG nova.compute.claims [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 788.821215] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.865873] env[62585]: DEBUG oslo_concurrency.lockutils [req-b2f1530c-2f9d-46da-82bd-cca01f364413 req-157f3922-7c03-46dd-a339-a8d1f1c6c8ff service nova] Releasing lock "refresh_cache-5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.866140] env[62585]: DEBUG nova.compute.manager [req-b2f1530c-2f9d-46da-82bd-cca01f364413 req-157f3922-7c03-46dd-a339-a8d1f1c6c8ff service nova] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Received event network-vif-deleted-2d6dc99f-ef27-43fc-89e4-f59b4d1cff9b {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 788.933074] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 788.958626] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 788.958913] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 788.959078] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 788.959198] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 788.959343] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 788.959487] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 788.959688] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 788.959841] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 788.960014] env[62585]: DEBUG nova.virt.hardware [None 
req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 788.960223] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 788.960429] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 788.961293] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecdad36-7005-4f53-b555-c1e375b9e603 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.969249] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191eea20-8a90-45db-9c97-a4a16dd39c93 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.982810] env[62585]: ERROR nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cedac717-629f-4a55-902e-4a850482fb32, please check neutron logs for more information. 
[ 788.982810] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Traceback (most recent call last): [ 788.982810] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 788.982810] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] yield resources [ 788.982810] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 788.982810] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] self.driver.spawn(context, instance, image_meta, [ 788.982810] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 788.982810] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 788.982810] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 788.982810] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] vm_ref = self.build_virtual_machine(instance, [ 788.982810] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 788.983185] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] vif_infos = vmwarevif.get_vif_info(self._session, [ 788.983185] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 788.983185] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] for vif in network_info: [ 788.983185] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 788.983185] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] return self._sync_wrapper(fn, *args, **kwargs) [ 788.983185] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 788.983185] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] self.wait() [ 788.983185] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 788.983185] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] self[:] = self._gt.wait() [ 788.983185] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 788.983185] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] return self._exit_event.wait() [ 788.983185] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 788.983185] env[62585]: ERROR 
nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] current.throw(*self._exc) [ 788.983582] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 788.983582] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] result = function(*args, **kwargs) [ 788.983582] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 788.983582] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] return func(*args, **kwargs) [ 788.983582] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 788.983582] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] raise e [ 788.983582] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 788.983582] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] nwinfo = self.network_api.allocate_for_instance( [ 788.983582] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 788.983582] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] created_port_ids = self._update_ports_for_instance( [ 788.983582] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 788.983582] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] with excutils.save_and_reraise_exception(): [ 788.983582] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 788.983868] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] self.force_reraise() [ 788.983868] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 788.983868] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] raise self.value [ 788.983868] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 788.983868] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] updated_port = self._update_port( [ 788.983868] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 788.983868] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] _ensure_no_port_binding_failure(port) [ 788.983868] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
788.983868] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] raise exception.PortBindingFailed(port_id=port['id']) [ 788.983868] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] nova.exception.PortBindingFailed: Binding failed for port cedac717-629f-4a55-902e-4a850482fb32, please check neutron logs for more information. [ 788.983868] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] [ 788.983868] env[62585]: INFO nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Terminating instance [ 788.985087] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "refresh_cache-8a9daa60-e93a-4276-bf23-652ae7b0618b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.985252] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquired lock "refresh_cache-8a9daa60-e93a-4276-bf23-652ae7b0618b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.985418] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.234889] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.310294] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.412955] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1469a566-e88c-42d3-b41f-0a4a1ae72df9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.420767] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9fd9a5c-028f-4bea-a6ce-f0e9b1028fce {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.449440] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d510d4b-fb88-4707-8dfe-aeb85ec89abf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.456467] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccd28b7-0e99-413e-95aa-610433736f37 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.468949] env[62585]: DEBUG nova.compute.provider_tree [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.500888] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.567388] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.730611] env[62585]: DEBUG nova.compute.manager [req-5393c218-82e6-4b5a-9ab1-005315e19c5e req-96588e33-b71d-4a7e-854c-0c68b665471f service nova] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Received event network-changed-cedac717-629f-4a55-902e-4a850482fb32 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 789.730812] env[62585]: DEBUG nova.compute.manager [req-5393c218-82e6-4b5a-9ab1-005315e19c5e req-96588e33-b71d-4a7e-854c-0c68b665471f service nova] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Refreshing instance network info cache due to event network-changed-cedac717-629f-4a55-902e-4a850482fb32. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 789.731032] env[62585]: DEBUG oslo_concurrency.lockutils [req-5393c218-82e6-4b5a-9ab1-005315e19c5e req-96588e33-b71d-4a7e-854c-0c68b665471f service nova] Acquiring lock "refresh_cache-8a9daa60-e93a-4276-bf23-652ae7b0618b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.813064] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Releasing lock "refresh_cache-dd57237d-875e-453a-b830-749776ce10b4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.813307] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 789.813486] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 789.813653] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 789.827461] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.972097] env[62585]: DEBUG nova.scheduler.client.report [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 790.069733] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Releasing lock "refresh_cache-8a9daa60-e93a-4276-bf23-652ae7b0618b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.070232] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 790.070466] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 790.070766] env[62585]: DEBUG oslo_concurrency.lockutils [req-5393c218-82e6-4b5a-9ab1-005315e19c5e req-96588e33-b71d-4a7e-854c-0c68b665471f service nova] Acquired lock "refresh_cache-8a9daa60-e93a-4276-bf23-652ae7b0618b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.070938] env[62585]: DEBUG nova.network.neutron [req-5393c218-82e6-4b5a-9ab1-005315e19c5e req-96588e33-b71d-4a7e-854c-0c68b665471f service nova] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Refreshing network info cache for port cedac717-629f-4a55-902e-4a850482fb32 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 790.072184] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84b94600-fd05-49b8-b575-b589c5f299fa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.081253] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73a1c41-b48a-4edd-ac3a-1eb01af87a80 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.102801] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8a9daa60-e93a-4276-bf23-652ae7b0618b could not be 
found. [ 790.103021] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 790.103206] env[62585]: INFO nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Took 0.03 seconds to destroy the instance on the hypervisor. [ 790.103437] env[62585]: DEBUG oslo.service.loopingcall [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 790.103647] env[62585]: DEBUG nova.compute.manager [-] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 790.103737] env[62585]: DEBUG nova.network.neutron [-] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 790.118991] env[62585]: DEBUG nova.network.neutron [-] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.329851] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.476455] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.768s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.477045] env[62585]: ERROR nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 795912b3-b7d8-41c0-8e52-65af33e6e0e8, please check neutron logs for more information. 
[ 790.477045] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Traceback (most recent call last): [ 790.477045] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 790.477045] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] self.driver.spawn(context, instance, image_meta, [ 790.477045] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 790.477045] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 790.477045] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 790.477045] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] vm_ref = self.build_virtual_machine(instance, [ 790.477045] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 790.477045] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] vif_infos = vmwarevif.get_vif_info(self._session, [ 790.477045] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 790.477339] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] for vif in network_info: [ 790.477339] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 790.477339] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] return self._sync_wrapper(fn, *args, **kwargs) [ 790.477339] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 790.477339] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] self.wait() [ 790.477339] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 790.477339] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] self[:] = self._gt.wait() [ 790.477339] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 790.477339] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] return self._exit_event.wait() [ 790.477339] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 790.477339] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] current.throw(*self._exc) [ 790.477339] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
790.477339] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] result = function(*args, **kwargs) [ 790.477619] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 790.477619] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] return func(*args, **kwargs) [ 790.477619] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 790.477619] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] raise e [ 790.477619] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 790.477619] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] nwinfo = self.network_api.allocate_for_instance( [ 790.477619] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 790.477619] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] created_port_ids = self._update_ports_for_instance( [ 790.477619] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 790.477619] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] with excutils.save_and_reraise_exception(): [ 790.477619] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 790.477619] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] self.force_reraise() [ 790.477619] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 790.477974] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] raise self.value [ 790.477974] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 790.477974] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] updated_port = self._update_port( [ 790.477974] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 790.477974] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] _ensure_no_port_binding_failure(port) [ 790.477974] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 790.477974] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] raise exception.PortBindingFailed(port_id=port['id']) [ 790.477974] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] nova.exception.PortBindingFailed: Binding failed for 
port 795912b3-b7d8-41c0-8e52-65af33e6e0e8, please check neutron logs for more information. [ 790.477974] env[62585]: ERROR nova.compute.manager [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] [ 790.477974] env[62585]: DEBUG nova.compute.utils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Binding failed for port 795912b3-b7d8-41c0-8e52-65af33e6e0e8, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 790.479012] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.224s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.480503] env[62585]: INFO nova.compute.claims [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 790.483068] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Build of instance 92168077-2b7e-4355-9880-a2f62674fc7e was re-scheduled: Binding failed for port 795912b3-b7d8-41c0-8e52-65af33e6e0e8, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 790.483453] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 790.483671] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquiring lock "refresh_cache-92168077-2b7e-4355-9880-a2f62674fc7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.483811] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquired lock "refresh_cache-92168077-2b7e-4355-9880-a2f62674fc7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.483964] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 790.590771] env[62585]: DEBUG nova.network.neutron [req-5393c218-82e6-4b5a-9ab1-005315e19c5e req-96588e33-b71d-4a7e-854c-0c68b665471f service nova] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.620932] env[62585]: DEBUG nova.network.neutron [-] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.670676] env[62585]: DEBUG nova.network.neutron [req-5393c218-82e6-4b5a-9ab1-005315e19c5e req-96588e33-b71d-4a7e-854c-0c68b665471f service nova] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.833341] env[62585]: INFO nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: dd57237d-875e-453a-b830-749776ce10b4] Took 1.02 seconds to deallocate network for instance. [ 791.004381] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.073582] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.123411] env[62585]: INFO nova.compute.manager [-] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Took 1.02 seconds to deallocate network for instance. [ 791.126710] env[62585]: DEBUG nova.compute.claims [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 791.126861] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.173059] env[62585]: DEBUG oslo_concurrency.lockutils [req-5393c218-82e6-4b5a-9ab1-005315e19c5e req-96588e33-b71d-4a7e-854c-0c68b665471f service nova] Releasing lock "refresh_cache-8a9daa60-e93a-4276-bf23-652ae7b0618b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.173342] env[62585]: DEBUG nova.compute.manager [req-5393c218-82e6-4b5a-9ab1-005315e19c5e req-96588e33-b71d-4a7e-854c-0c68b665471f service nova] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Received event network-vif-deleted-cedac717-629f-4a55-902e-4a850482fb32 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 791.576547] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Releasing lock "refresh_cache-92168077-2b7e-4355-9880-a2f62674fc7e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.576784] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 791.577803] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 791.577803] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 791.592145] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.694134] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a0dbf6-4533-40a0-a7d3-98ffa006fdfe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.703034] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65698f08-43e7-4248-9a06-912c02cecdf3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.731475] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3beeb8-33dc-4479-ae43-102161b4f2e7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.738561] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f47376-1ec6-4b19-8973-d95fcf1aa934 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.751292] env[62585]: DEBUG nova.compute.provider_tree [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.860781] env[62585]: INFO nova.scheduler.client.report [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Deleted allocations for instance dd57237d-875e-453a-b830-749776ce10b4 [ 792.095490] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.254998] env[62585]: DEBUG nova.scheduler.client.report [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 
tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 792.370602] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "dd57237d-875e-453a-b830-749776ce10b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.728s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.597852] env[62585]: INFO nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 92168077-2b7e-4355-9880-a2f62674fc7e] Took 1.02 seconds to deallocate network for instance. [ 792.759973] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.281s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.760554] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 792.763086] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.649s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.764424] env[62585]: INFO nova.compute.claims [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 792.873248] env[62585]: DEBUG nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 793.268800] env[62585]: DEBUG nova.compute.utils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 793.272031] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 793.272138] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 793.322067] env[62585]: DEBUG nova.policy [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8fd0e110bfc94784b4fe881fb27a48e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10320418f69d4f0e88a3adf2a8245237', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 793.394279] env[62585]: DEBUG oslo_concurrency.lockutils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.571568] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Successfully created port: d7391688-1eaa-4b3a-9fdb-9e1c117df433 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 793.632756] env[62585]: INFO nova.scheduler.client.report [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Deleted allocations for instance 92168077-2b7e-4355-9880-a2f62674fc7e [ 793.773564] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 793.972925] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787c80f5-1347-450a-b417-03b2a65390ca {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.980803] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a65b52d-56bb-458b-aeeb-eec08e1e1a96 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.010721] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4b4ab4-a664-4cab-9184-66848a7c4121 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.017386] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08ec902-e7a4-4e9c-9a24-4f7402847234 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.030216] env[62585]: DEBUG nova.compute.provider_tree [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.141983] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "92168077-2b7e-4355-9880-a2f62674fc7e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 118.468s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.272994] env[62585]: DEBUG nova.compute.manager [req-653663b7-16b7-4890-ad8c-9ae4d209aa11 req-6f5d3888-6b35-43f7-ba43-1ca8aab00e95 service nova] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Received event network-changed-d7391688-1eaa-4b3a-9fdb-9e1c117df433 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 794.273291] env[62585]: DEBUG nova.compute.manager [req-653663b7-16b7-4890-ad8c-9ae4d209aa11 req-6f5d3888-6b35-43f7-ba43-1ca8aab00e95 service nova] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Refreshing instance network info cache due to event network-changed-d7391688-1eaa-4b3a-9fdb-9e1c117df433. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 794.273581] env[62585]: DEBUG oslo_concurrency.lockutils [req-653663b7-16b7-4890-ad8c-9ae4d209aa11 req-6f5d3888-6b35-43f7-ba43-1ca8aab00e95 service nova] Acquiring lock "refresh_cache-4dfc00d9-64db-439e-baee-041562f7354b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.273781] env[62585]: DEBUG oslo_concurrency.lockutils [req-653663b7-16b7-4890-ad8c-9ae4d209aa11 req-6f5d3888-6b35-43f7-ba43-1ca8aab00e95 service nova] Acquired lock "refresh_cache-4dfc00d9-64db-439e-baee-041562f7354b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.273976] env[62585]: DEBUG nova.network.neutron [req-653663b7-16b7-4890-ad8c-9ae4d209aa11 req-6f5d3888-6b35-43f7-ba43-1ca8aab00e95 service nova] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Refreshing network info cache for port d7391688-1eaa-4b3a-9fdb-9e1c117df433 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 794.469755] env[62585]: ERROR nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d7391688-1eaa-4b3a-9fdb-9e1c117df433, please check neutron logs for more information. [ 794.469755] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 794.469755] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 794.469755] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 794.469755] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 794.469755] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 794.469755] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 794.469755] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 794.469755] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 794.469755] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 794.469755] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 794.469755] env[62585]: ERROR nova.compute.manager raise self.value [ 794.469755] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 794.469755] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 794.469755] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 794.469755] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 794.470245] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 794.470245] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 794.470245] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port d7391688-1eaa-4b3a-9fdb-9e1c117df433, please check neutron logs for more information. [ 794.470245] env[62585]: ERROR nova.compute.manager [ 794.470245] env[62585]: Traceback (most recent call last): [ 794.470245] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 794.470245] env[62585]: listener.cb(fileno) [ 794.470245] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 794.470245] env[62585]: result = function(*args, **kwargs) [ 794.470245] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 794.470245] env[62585]: return func(*args, **kwargs) [ 794.470245] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 794.470245] env[62585]: raise e [ 794.470245] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 794.470245] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 794.470245] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 794.470245] env[62585]: created_port_ids = self._update_ports_for_instance( [ 794.470245] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 794.470245] env[62585]: with excutils.save_and_reraise_exception(): [ 794.470245] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 794.470245] env[62585]: self.force_reraise() [ 794.470245] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 794.470245] env[62585]: raise self.value [ 794.470245] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 794.470245] env[62585]: updated_port = self._update_port( [ 794.470245] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 794.470245] env[62585]: _ensure_no_port_binding_failure(port) [ 794.470245] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 794.470245] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 794.470927] env[62585]: nova.exception.PortBindingFailed: Binding failed for port d7391688-1eaa-4b3a-9fdb-9e1c117df433, please check neutron logs for more information. 
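Every failed build in this run bottoms out in the same frame: nova/network/neutron.py:294, _ensure_no_port_binding_failure, raising PortBindingFailed(port_id=port['id']) once Neutron reports that the port could not be bound. A minimal, self-contained sketch of that check follows; the 'binding:vif_type' attribute, the 'binding_failed' marker value, and the stand-in exception class are illustrative assumptions, not the actual nova.exception / nova.network.neutron code.

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed.

    Assumption: the message format simply mirrors the one seen in the
    log records above.
    """

    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)


VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed Neutron marker value


def _ensure_no_port_binding_failure(port):
    # The tracebacks show this helper (nova/network/neutron.py:294) raising
    # PortBindingFailed(port_id=port['id']); the concrete condition checked
    # here (the port's 'binding:vif_type' attribute) is an assumption.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Usage: a port dict shaped like the one Neutron returned for the port
# that failed in this run.
try:
    _ensure_no_port_binding_failure(
        {'id': 'd7391688-1eaa-4b3a-9fdb-9e1c117df433',
         'binding:vif_type': VIF_TYPE_BINDING_FAILED})
except PortBindingFailed as exc:
    print(exc)

Surfacing the failed binding as an exception at this point is what lets _build_and_run_instance abort the resource claim and reschedule the instance, which is the "Failed to build and run instance" / "was re-scheduled" sequence recorded above.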
[ 794.470927] env[62585]: Removing descriptor: 17 [ 794.533225] env[62585]: DEBUG nova.scheduler.client.report [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 794.644696] env[62585]: DEBUG nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 794.789629] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 794.792441] env[62585]: DEBUG nova.network.neutron [req-653663b7-16b7-4890-ad8c-9ae4d209aa11 req-6f5d3888-6b35-43f7-ba43-1ca8aab00e95 service nova] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.812943] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 794.813184] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 794.813339] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
794.813521] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 794.813665] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 794.813809] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 794.814015] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 794.814178] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 794.814342] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 794.814516] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 794.814704] env[62585]: DEBUG nova.virt.hardware [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 794.815620] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2da1640-66cc-4a15-94e2-f52958a1181c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.823619] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f389b93d-e82e-49a1-bc58-94cc1fa01812 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.839014] env[62585]: ERROR nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Instance failed to spawn: 
nova.exception.PortBindingFailed: Binding failed for port d7391688-1eaa-4b3a-9fdb-9e1c117df433, please check neutron logs for more information. [ 794.839014] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Traceback (most recent call last): [ 794.839014] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 794.839014] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] yield resources [ 794.839014] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 794.839014] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] self.driver.spawn(context, instance, image_meta, [ 794.839014] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 794.839014] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 794.839014] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 794.839014] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] vm_ref = self.build_virtual_machine(instance, [ 794.839014] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 794.839377] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] vif_infos = vmwarevif.get_vif_info(self._session, [ 794.839377] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 794.839377] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] for vif in network_info: [ 794.839377] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 794.839377] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] return self._sync_wrapper(fn, *args, **kwargs) [ 794.839377] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 794.839377] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] self.wait() [ 794.839377] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 794.839377] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] self[:] = self._gt.wait() [ 794.839377] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 794.839377] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] return self._exit_event.wait() [ 794.839377] env[62585]: ERROR nova.compute.manager [instance: 
4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 794.839377] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] current.throw(*self._exc) [ 794.839664] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 794.839664] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] result = function(*args, **kwargs) [ 794.839664] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 794.839664] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] return func(*args, **kwargs) [ 794.839664] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 794.839664] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] raise e [ 794.839664] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 794.839664] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] nwinfo = self.network_api.allocate_for_instance( [ 794.839664] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 794.839664] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] created_port_ids = self._update_ports_for_instance( [ 794.839664] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 794.839664] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] with excutils.save_and_reraise_exception(): [ 794.839664] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 794.839969] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] self.force_reraise() [ 794.839969] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 794.839969] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] raise self.value [ 794.839969] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 794.839969] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] updated_port = self._update_port( [ 794.839969] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 794.839969] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] _ensure_no_port_binding_failure(port) [ 794.839969] env[62585]: ERROR 
nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 794.839969] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] raise exception.PortBindingFailed(port_id=port['id']) [ 794.839969] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] nova.exception.PortBindingFailed: Binding failed for port d7391688-1eaa-4b3a-9fdb-9e1c117df433, please check neutron logs for more information. [ 794.839969] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] [ 794.839969] env[62585]: INFO nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Terminating instance [ 794.841403] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "refresh_cache-4dfc00d9-64db-439e-baee-041562f7354b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.861443] env[62585]: DEBUG nova.network.neutron [req-653663b7-16b7-4890-ad8c-9ae4d209aa11 req-6f5d3888-6b35-43f7-ba43-1ca8aab00e95 service nova] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.038830] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.276s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.039588] env[62585]: DEBUG nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 795.042362] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.884s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.167810] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.364851] env[62585]: DEBUG oslo_concurrency.lockutils [req-653663b7-16b7-4890-ad8c-9ae4d209aa11 req-6f5d3888-6b35-43f7-ba43-1ca8aab00e95 service nova] Releasing lock "refresh_cache-4dfc00d9-64db-439e-baee-041562f7354b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.365342] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquired lock "refresh_cache-4dfc00d9-64db-439e-baee-041562f7354b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.365533] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 795.547151] env[62585]: DEBUG nova.compute.utils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 795.551716] env[62585]: DEBUG nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 795.551888] env[62585]: DEBUG nova.network.neutron [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 795.603796] env[62585]: DEBUG nova.policy [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '12ea5326e4bc4c7195db3aeb1b64c2b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de7e07f7b1ff417f8875ca2b8a5e85a1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 795.722031] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3091d5-157d-489e-95ce-2f803faa9652 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.729170] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d969a711-6d3d-4e26-9c1b-89558dbbbf51 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.757568] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095af151-528a-4776-8fcf-392f190d42dd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.764189] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e5cad9-f01d-4fc5-ab63-96e7a6c9e951 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.776478] env[62585]: DEBUG nova.compute.provider_tree [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 795.868629] env[62585]: DEBUG nova.network.neutron [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Successfully created port: 7a9e3ef3-6668-4c0a-a02e-5e5731832c37 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 795.890165] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 795.994891] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.054715] env[62585]: DEBUG nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 796.136881] env[62585]: DEBUG nova.network.neutron [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Successfully created port: 33480c9e-f4a8-462c-b6ac-fe2ae36d9f1b {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 796.280486] env[62585]: DEBUG nova.scheduler.client.report [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 796.302358] env[62585]: DEBUG nova.compute.manager [req-6ff49021-9ebe-4b8a-913b-8b13b30ff1f6 req-92522e72-c7e2-4643-9df2-c5090fc7823a service nova] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Received event network-vif-deleted-d7391688-1eaa-4b3a-9fdb-9e1c117df433 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 796.363798] env[62585]: DEBUG nova.network.neutron [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Successfully created port: d2383980-f717-4171-8df4-da4d859ae446 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 796.499135] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Releasing lock "refresh_cache-4dfc00d9-64db-439e-baee-041562f7354b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.499295] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 796.499545] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 796.499993] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-38f0b49f-c6c1-4f57-9b56-b94b8de6466b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.513711] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d532f33-e246-4cbc-8ed5-92ae4c7bdced {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.547112] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4dfc00d9-64db-439e-baee-041562f7354b could not be found. [ 796.547501] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 796.547783] env[62585]: INFO nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 796.548171] env[62585]: DEBUG oslo.service.loopingcall [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 796.548536] env[62585]: DEBUG nova.compute.manager [-] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 796.548683] env[62585]: DEBUG nova.network.neutron [-] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 796.571944] env[62585]: DEBUG nova.network.neutron [-] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 796.785280] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.743s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.785931] env[62585]: ERROR nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1886b56b-9178-4fe5-a5e8-3d1e44673714, please check neutron logs for more information. [ 796.785931] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Traceback (most recent call last): [ 796.785931] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 796.785931] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] self.driver.spawn(context, instance, image_meta, [ 796.785931] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 796.785931] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 796.785931] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 796.785931] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] vm_ref = self.build_virtual_machine(instance, [ 796.785931] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 796.785931] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] vif_infos = vmwarevif.get_vif_info(self._session, [ 796.785931] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 796.786311] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] for vif in network_info: [ 796.786311] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 796.786311] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] return self._sync_wrapper(fn, *args, **kwargs) [ 796.786311] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 796.786311] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] self.wait() [ 796.786311] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 796.786311] 
env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] self[:] = self._gt.wait() [ 796.786311] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 796.786311] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] return self._exit_event.wait() [ 796.786311] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 796.786311] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] result = hub.switch() [ 796.786311] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 796.786311] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] return self.greenlet.switch() [ 796.786615] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 796.786615] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] result = function(*args, **kwargs) [ 796.786615] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 796.786615] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] return func(*args, **kwargs) [ 796.786615] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 796.786615] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] raise e [ 796.786615] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 796.786615] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] nwinfo = self.network_api.allocate_for_instance( [ 796.786615] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 796.786615] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] created_port_ids = self._update_ports_for_instance( [ 796.786615] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 796.786615] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] with excutils.save_and_reraise_exception(): [ 796.786615] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 796.786956] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] self.force_reraise() [ 796.786956] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 796.786956] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] raise self.value [ 796.786956] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 796.786956] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] updated_port = self._update_port( [ 796.786956] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 796.786956] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] _ensure_no_port_binding_failure(port) [ 796.786956] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 796.786956] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] raise exception.PortBindingFailed(port_id=port['id']) [ 796.786956] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] nova.exception.PortBindingFailed: Binding failed for port 1886b56b-9178-4fe5-a5e8-3d1e44673714, please check neutron logs for more information. [ 796.786956] env[62585]: ERROR nova.compute.manager [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] [ 796.787267] env[62585]: DEBUG nova.compute.utils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Binding failed for port 1886b56b-9178-4fe5-a5e8-3d1e44673714, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 796.787909] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.471s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.789379] env[62585]: INFO nova.compute.claims [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 796.791881] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Build of instance 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed was re-scheduled: Binding failed for port 1886b56b-9178-4fe5-a5e8-3d1e44673714, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 796.792314] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 796.792537] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquiring lock "refresh_cache-3d0c96e4-65b2-46f7-a742-f36cd11ff8ed" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.792680] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Acquired lock "refresh_cache-3d0c96e4-65b2-46f7-a742-f36cd11ff8ed" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.792836] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 797.070548] env[62585]: DEBUG nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 797.075090] env[62585]: DEBUG nova.network.neutron [-] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.103261] env[62585]: DEBUG nova.virt.hardware [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 797.103509] env[62585]: DEBUG nova.virt.hardware [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 797.103665] env[62585]: DEBUG nova.virt.hardware [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 797.103846] env[62585]: DEBUG nova.virt.hardware [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 797.103989] env[62585]: DEBUG nova.virt.hardware [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 797.104149] env[62585]: DEBUG nova.virt.hardware [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 797.104350] env[62585]: DEBUG nova.virt.hardware [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 797.104504] env[62585]: DEBUG nova.virt.hardware [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 
tempest-ServersTestMultiNic-1082413995-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 797.104663] env[62585]: DEBUG nova.virt.hardware [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 797.104822] env[62585]: DEBUG nova.virt.hardware [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 797.104989] env[62585]: DEBUG nova.virt.hardware [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 797.106167] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3ab94e-4e11-4c48-beaa-db5b6511607a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.114775] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9821acf0-2eeb-4a1e-8119-9b053bda2651 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.299253] env[62585]: ERROR nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7a9e3ef3-6668-4c0a-a02e-5e5731832c37, please check neutron logs for more information. 
[ 797.299253] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 797.299253] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 797.299253] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 797.299253] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 797.299253] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 797.299253] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 797.299253] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 797.299253] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 797.299253] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 797.299253] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 797.299253] env[62585]: ERROR nova.compute.manager raise self.value [ 797.299253] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 797.299253] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 797.299253] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 797.299253] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 797.299765] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 797.299765] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 797.299765] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7a9e3ef3-6668-4c0a-a02e-5e5731832c37, please check neutron logs for more information. 
[ 797.299765] env[62585]: ERROR nova.compute.manager [ 797.299765] env[62585]: Traceback (most recent call last): [ 797.299765] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 797.299765] env[62585]: listener.cb(fileno) [ 797.299765] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 797.299765] env[62585]: result = function(*args, **kwargs) [ 797.299765] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 797.299765] env[62585]: return func(*args, **kwargs) [ 797.299765] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 797.299765] env[62585]: raise e [ 797.299765] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 797.299765] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 797.299765] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 797.299765] env[62585]: created_port_ids = self._update_ports_for_instance( [ 797.299765] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 797.299765] env[62585]: with excutils.save_and_reraise_exception(): [ 797.299765] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 797.299765] env[62585]: self.force_reraise() [ 797.299765] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 797.299765] env[62585]: raise self.value [ 797.299765] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 797.299765] env[62585]: updated_port = self._update_port( [ 797.299765] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 797.299765] env[62585]: _ensure_no_port_binding_failure(port) [ 797.299765] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 797.299765] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 797.300561] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 7a9e3ef3-6668-4c0a-a02e-5e5731832c37, please check neutron logs for more information. [ 797.300561] env[62585]: Removing descriptor: 17 [ 797.300637] env[62585]: ERROR nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7a9e3ef3-6668-4c0a-a02e-5e5731832c37, please check neutron logs for more information. 
[ 797.300637] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Traceback (most recent call last): [ 797.300637] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 797.300637] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] yield resources [ 797.300637] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 797.300637] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] self.driver.spawn(context, instance, image_meta, [ 797.300637] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 797.300637] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] self._vmops.spawn(context, instance, image_meta, injected_files, [ 797.300637] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 797.300637] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] vm_ref = self.build_virtual_machine(instance, [ 797.300637] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 797.300933] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] vif_infos = vmwarevif.get_vif_info(self._session, [ 797.300933] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 797.300933] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] for vif in network_info: [ 797.300933] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 797.300933] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] return self._sync_wrapper(fn, *args, **kwargs) [ 797.300933] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 797.300933] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] self.wait() [ 797.300933] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 797.300933] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] self[:] = self._gt.wait() [ 797.300933] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 797.300933] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] return self._exit_event.wait() [ 797.300933] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 797.300933] env[62585]: ERROR 
nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] result = hub.switch() [ 797.301345] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 797.301345] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] return self.greenlet.switch() [ 797.301345] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 797.301345] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] result = function(*args, **kwargs) [ 797.301345] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 797.301345] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] return func(*args, **kwargs) [ 797.301345] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 797.301345] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] raise e [ 797.301345] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 797.301345] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] nwinfo = self.network_api.allocate_for_instance( [ 797.301345] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 797.301345] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] created_port_ids = self._update_ports_for_instance( [ 797.301345] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 797.301692] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] with excutils.save_and_reraise_exception(): [ 797.301692] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 797.301692] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] self.force_reraise() [ 797.301692] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 797.301692] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] raise self.value [ 797.301692] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 797.301692] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] updated_port = self._update_port( [ 797.301692] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 797.301692] 
env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] _ensure_no_port_binding_failure(port) [ 797.301692] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 797.301692] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] raise exception.PortBindingFailed(port_id=port['id']) [ 797.301692] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] nova.exception.PortBindingFailed: Binding failed for port 7a9e3ef3-6668-4c0a-a02e-5e5731832c37, please check neutron logs for more information. [ 797.301692] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] [ 797.302059] env[62585]: INFO nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Terminating instance [ 797.303716] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquiring lock "refresh_cache-e4bd743b-b3a6-4872-9e33-a0183b976292" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.303920] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquired lock "refresh_cache-e4bd743b-b3a6-4872-9e33-a0183b976292" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.304156] env[62585]: DEBUG nova.network.neutron [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 797.316752] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 797.393271] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.578478] env[62585]: INFO nova.compute.manager [-] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Took 1.03 seconds to deallocate network for instance. 
[ 797.580915] env[62585]: DEBUG nova.compute.claims [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 797.581117] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.856256] env[62585]: DEBUG nova.network.neutron [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 797.898823] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Releasing lock "refresh_cache-3d0c96e4-65b2-46f7-a742-f36cd11ff8ed" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.898823] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 797.899035] env[62585]: DEBUG nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 797.899594] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 797.915431] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 797.963109] env[62585]: DEBUG nova.network.neutron [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.019743] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e535aa8-0877-4735-b541-6129af65c846 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.027727] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f48159-9797-41c9-b48d-e4c6bd668c9e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.059210] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33125ed0-7f61-4014-a7f4-0fb732bc43c2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.066249] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb76f85-846a-4e3a-b528-f86b50d39edd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.079280] env[62585]: DEBUG nova.compute.provider_tree [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.331358] env[62585]: DEBUG nova.compute.manager [req-afbf5195-2662-498a-912e-19b15b52110a req-f988c197-49ce-4cb5-92b1-0d827f8a6b9e service nova] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Received event network-changed-7a9e3ef3-6668-4c0a-a02e-5e5731832c37 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 798.331559] env[62585]: DEBUG nova.compute.manager [req-afbf5195-2662-498a-912e-19b15b52110a req-f988c197-49ce-4cb5-92b1-0d827f8a6b9e service nova] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Refreshing instance network info cache due to event network-changed-7a9e3ef3-6668-4c0a-a02e-5e5731832c37. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 798.331751] env[62585]: DEBUG oslo_concurrency.lockutils [req-afbf5195-2662-498a-912e-19b15b52110a req-f988c197-49ce-4cb5-92b1-0d827f8a6b9e service nova] Acquiring lock "refresh_cache-e4bd743b-b3a6-4872-9e33-a0183b976292" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.420100] env[62585]: DEBUG nova.network.neutron [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.466134] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Releasing lock "refresh_cache-e4bd743b-b3a6-4872-9e33-a0183b976292" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.466573] env[62585]: DEBUG nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 798.466883] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 798.467210] env[62585]: DEBUG oslo_concurrency.lockutils [req-afbf5195-2662-498a-912e-19b15b52110a req-f988c197-49ce-4cb5-92b1-0d827f8a6b9e service nova] Acquired lock "refresh_cache-e4bd743b-b3a6-4872-9e33-a0183b976292" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.467305] env[62585]: DEBUG nova.network.neutron [req-afbf5195-2662-498a-912e-19b15b52110a req-f988c197-49ce-4cb5-92b1-0d827f8a6b9e service nova] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Refreshing network info cache for port 7a9e3ef3-6668-4c0a-a02e-5e5731832c37 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 798.468336] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-10093ddc-2c7f-492f-a664-421ee7661992 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.477214] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20daabc0-7539-4d5a-93c6-5dda450d6bd2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.500551] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e4bd743b-b3a6-4872-9e33-a0183b976292 could not be found. 
[ 798.500770] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 798.500945] env[62585]: INFO nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Took 0.03 seconds to destroy the instance on the hypervisor. [ 798.501191] env[62585]: DEBUG oslo.service.loopingcall [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 798.501392] env[62585]: DEBUG nova.compute.manager [-] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 798.501484] env[62585]: DEBUG nova.network.neutron [-] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 798.533445] env[62585]: DEBUG nova.network.neutron [-] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 798.582589] env[62585]: DEBUG nova.scheduler.client.report [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 798.923632] env[62585]: INFO nova.compute.manager [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] [instance: 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed] Took 1.02 seconds to deallocate network for instance. [ 798.992117] env[62585]: DEBUG nova.network.neutron [req-afbf5195-2662-498a-912e-19b15b52110a req-f988c197-49ce-4cb5-92b1-0d827f8a6b9e service nova] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 799.078570] env[62585]: DEBUG nova.network.neutron [req-afbf5195-2662-498a-912e-19b15b52110a req-f988c197-49ce-4cb5-92b1-0d827f8a6b9e service nova] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.087124] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.299s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.087686] env[62585]: DEBUG nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 799.092332] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.152s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.375336] env[62585]: DEBUG nova.network.neutron [-] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.581216] env[62585]: DEBUG oslo_concurrency.lockutils [req-afbf5195-2662-498a-912e-19b15b52110a req-f988c197-49ce-4cb5-92b1-0d827f8a6b9e service nova] Releasing lock "refresh_cache-e4bd743b-b3a6-4872-9e33-a0183b976292" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.581512] env[62585]: DEBUG nova.compute.manager [req-afbf5195-2662-498a-912e-19b15b52110a req-f988c197-49ce-4cb5-92b1-0d827f8a6b9e service nova] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Received event network-vif-deleted-7a9e3ef3-6668-4c0a-a02e-5e5731832c37 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 799.596410] env[62585]: DEBUG nova.compute.utils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 799.601852] env[62585]: DEBUG nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 799.602045] env[62585]: DEBUG nova.network.neutron [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 799.660113] env[62585]: DEBUG nova.policy [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '79e20a95276d44ce8b8323e1cbe05904', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2e49d4e1baa42e987434709bff86f37', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 799.766197] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a8982b-019b-4653-89c0-249e6ce906b3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.773700] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e485b2e8-e3e6-4614-86bf-13d648dd17f6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.803558] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b28c7f2-d19e-431d-bd8f-7fa2542d283e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.810892] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0b6140-eaab-402b-9ffa-bffc6f5a705c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.823760] env[62585]: DEBUG nova.compute.provider_tree [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.878300] env[62585]: INFO nova.compute.manager [-] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Took 1.38 seconds to deallocate network for instance. 
[ 799.880548] env[62585]: DEBUG nova.compute.claims [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 799.880726] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.922567] env[62585]: DEBUG nova.network.neutron [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Successfully created port: faaa0631-b196-4de4-8644-db77250640ab {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 799.951703] env[62585]: INFO nova.scheduler.client.report [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Deleted allocations for instance 3d0c96e4-65b2-46f7-a742-f36cd11ff8ed [ 800.103292] env[62585]: DEBUG nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 800.327126] env[62585]: DEBUG nova.scheduler.client.report [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 800.463129] env[62585]: DEBUG oslo_concurrency.lockutils [None req-85ce4328-82ba-44ac-9962-2904209df499 tempest-ListServersNegativeTestJSON-1168173202 tempest-ListServersNegativeTestJSON-1168173202-project-member] Lock "3d0c96e4-65b2-46f7-a742-f36cd11ff8ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 124.759s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.620953] env[62585]: DEBUG nova.compute.manager [req-73c1ea02-384d-44e7-aae5-defe0240e2e7 req-60b18aa9-6522-472a-8d00-eae1f33932d2 service nova] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Received event network-changed-faaa0631-b196-4de4-8644-db77250640ab {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 800.621196] env[62585]: DEBUG nova.compute.manager [req-73c1ea02-384d-44e7-aae5-defe0240e2e7 req-60b18aa9-6522-472a-8d00-eae1f33932d2 service nova] 
[instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Refreshing instance network info cache due to event network-changed-faaa0631-b196-4de4-8644-db77250640ab. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 800.621418] env[62585]: DEBUG oslo_concurrency.lockutils [req-73c1ea02-384d-44e7-aae5-defe0240e2e7 req-60b18aa9-6522-472a-8d00-eae1f33932d2 service nova] Acquiring lock "refresh_cache-54e0a14b-cc4f-4445-8d86-f25cc410d7d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.621561] env[62585]: DEBUG oslo_concurrency.lockutils [req-73c1ea02-384d-44e7-aae5-defe0240e2e7 req-60b18aa9-6522-472a-8d00-eae1f33932d2 service nova] Acquired lock "refresh_cache-54e0a14b-cc4f-4445-8d86-f25cc410d7d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.621717] env[62585]: DEBUG nova.network.neutron [req-73c1ea02-384d-44e7-aae5-defe0240e2e7 req-60b18aa9-6522-472a-8d00-eae1f33932d2 service nova] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Refreshing network info cache for port faaa0631-b196-4de4-8644-db77250640ab {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 800.838019] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.746s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.838019] env[62585]: ERROR nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 691fd14e-e0ef-4a6f-859e-92f01fbd7650, please check neutron logs for more information. 
[ 800.838019] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Traceback (most recent call last): [ 800.838019] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 800.838019] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] self.driver.spawn(context, instance, image_meta, [ 800.838019] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 800.838019] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] self._vmops.spawn(context, instance, image_meta, injected_files, [ 800.838019] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 800.838019] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] vm_ref = self.build_virtual_machine(instance, [ 800.838334] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 800.838334] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] vif_infos = vmwarevif.get_vif_info(self._session, [ 800.838334] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 800.838334] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] for vif in network_info: [ 800.838334] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 800.838334] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] return self._sync_wrapper(fn, *args, **kwargs) [ 800.838334] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 800.838334] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] self.wait() [ 800.838334] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 800.838334] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] self[:] = self._gt.wait() [ 800.838334] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 800.838334] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] return self._exit_event.wait() [ 800.838334] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 800.838620] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] current.throw(*self._exc) [ 800.838620] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
800.838620] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] result = function(*args, **kwargs) [ 800.838620] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 800.838620] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] return func(*args, **kwargs) [ 800.838620] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 800.838620] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] raise e [ 800.838620] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 800.838620] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] nwinfo = self.network_api.allocate_for_instance( [ 800.838620] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 800.838620] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] created_port_ids = self._update_ports_for_instance( [ 800.838620] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 800.838620] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] with excutils.save_and_reraise_exception(): [ 800.838913] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 800.838913] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] self.force_reraise() [ 800.838913] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 800.838913] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] raise self.value [ 800.838913] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 800.838913] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] updated_port = self._update_port( [ 800.838913] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 800.838913] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] _ensure_no_port_binding_failure(port) [ 800.838913] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 800.838913] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] raise exception.PortBindingFailed(port_id=port['id']) [ 800.838913] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] nova.exception.PortBindingFailed: Binding failed for 
port 691fd14e-e0ef-4a6f-859e-92f01fbd7650, please check neutron logs for more information. [ 800.838913] env[62585]: ERROR nova.compute.manager [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] [ 800.839210] env[62585]: DEBUG nova.compute.utils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Binding failed for port 691fd14e-e0ef-4a6f-859e-92f01fbd7650, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 800.842639] env[62585]: DEBUG oslo_concurrency.lockutils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.342s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.849019] env[62585]: DEBUG nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Build of instance 66af981d-2fa4-4ef4-ac39-3f8f78c543af was re-scheduled: Binding failed for port 691fd14e-e0ef-4a6f-859e-92f01fbd7650, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 800.849019] env[62585]: DEBUG nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 800.849019] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Acquiring lock "refresh_cache-66af981d-2fa4-4ef4-ac39-3f8f78c543af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.849019] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Acquired lock "refresh_cache-66af981d-2fa4-4ef4-ac39-3f8f78c543af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.849990] env[62585]: DEBUG nova.network.neutron [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 800.852887] env[62585]: ERROR nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port faaa0631-b196-4de4-8644-db77250640ab, please check neutron logs for more information. 
[ 800.852887] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 800.852887] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 800.852887] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 800.852887] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 800.852887] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 800.852887] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 800.852887] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 800.852887] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 800.852887] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 800.852887] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 800.852887] env[62585]: ERROR nova.compute.manager raise self.value [ 800.852887] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 800.852887] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 800.852887] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 800.852887] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 800.854759] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 800.854759] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 800.854759] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port faaa0631-b196-4de4-8644-db77250640ab, please check neutron logs for more information. 
[ 800.854759] env[62585]: ERROR nova.compute.manager [ 800.854759] env[62585]: Traceback (most recent call last): [ 800.854759] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 800.854759] env[62585]: listener.cb(fileno) [ 800.854759] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 800.854759] env[62585]: result = function(*args, **kwargs) [ 800.854759] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 800.854759] env[62585]: return func(*args, **kwargs) [ 800.854759] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 800.854759] env[62585]: raise e [ 800.854759] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 800.854759] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 800.854759] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 800.854759] env[62585]: created_port_ids = self._update_ports_for_instance( [ 800.854759] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 800.854759] env[62585]: with excutils.save_and_reraise_exception(): [ 800.854759] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 800.854759] env[62585]: self.force_reraise() [ 800.854759] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 800.854759] env[62585]: raise self.value [ 800.854759] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 800.854759] env[62585]: updated_port = self._update_port( [ 800.854759] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 800.854759] env[62585]: _ensure_no_port_binding_failure(port) [ 800.854759] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 800.854759] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 800.855721] env[62585]: nova.exception.PortBindingFailed: Binding failed for port faaa0631-b196-4de4-8644-db77250640ab, please check neutron logs for more information. [ 800.855721] env[62585]: Removing descriptor: 15 [ 800.966258] env[62585]: DEBUG nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 801.115239] env[62585]: DEBUG nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 801.144498] env[62585]: DEBUG nova.virt.hardware [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 801.144749] env[62585]: DEBUG nova.virt.hardware [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 801.144902] env[62585]: DEBUG nova.virt.hardware [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 801.145095] env[62585]: DEBUG nova.virt.hardware [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 801.145241] env[62585]: DEBUG nova.virt.hardware [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 801.145385] env[62585]: DEBUG nova.virt.hardware [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 801.145585] env[62585]: DEBUG nova.virt.hardware [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 801.145739] env[62585]: DEBUG nova.virt.hardware [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 801.145902] 
env[62585]: DEBUG nova.virt.hardware [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 801.146223] env[62585]: DEBUG nova.virt.hardware [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 801.146346] env[62585]: DEBUG nova.virt.hardware [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 801.147279] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d1cd56-ea86-4374-a9ea-40156271e65f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.150785] env[62585]: DEBUG nova.network.neutron [req-73c1ea02-384d-44e7-aae5-defe0240e2e7 req-60b18aa9-6522-472a-8d00-eae1f33932d2 service nova] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 801.159448] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c0531a-006a-4a85-aa86-6513a24d0ef4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.174524] env[62585]: ERROR nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port faaa0631-b196-4de4-8644-db77250640ab, please check neutron logs for more information. 
[ 801.174524] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Traceback (most recent call last): [ 801.174524] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 801.174524] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] yield resources [ 801.174524] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 801.174524] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] self.driver.spawn(context, instance, image_meta, [ 801.174524] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 801.174524] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 801.174524] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 801.174524] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] vm_ref = self.build_virtual_machine(instance, [ 801.174524] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 801.175065] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] vif_infos = vmwarevif.get_vif_info(self._session, [ 801.175065] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 801.175065] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] for vif in network_info: [ 801.175065] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 801.175065] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] return self._sync_wrapper(fn, *args, **kwargs) [ 801.175065] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 801.175065] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] self.wait() [ 801.175065] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 801.175065] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] self[:] = self._gt.wait() [ 801.175065] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 801.175065] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] return self._exit_event.wait() [ 801.175065] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 801.175065] env[62585]: ERROR 
nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] current.throw(*self._exc) [ 801.175351] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 801.175351] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] result = function(*args, **kwargs) [ 801.175351] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 801.175351] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] return func(*args, **kwargs) [ 801.175351] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 801.175351] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] raise e [ 801.175351] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 801.175351] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] nwinfo = self.network_api.allocate_for_instance( [ 801.175351] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 801.175351] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] created_port_ids = self._update_ports_for_instance( [ 801.175351] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 801.175351] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] with excutils.save_and_reraise_exception(): [ 801.175351] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.175693] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] self.force_reraise() [ 801.175693] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.175693] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] raise self.value [ 801.175693] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 801.175693] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] updated_port = self._update_port( [ 801.175693] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 801.175693] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] _ensure_no_port_binding_failure(port) [ 801.175693] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
801.175693] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] raise exception.PortBindingFailed(port_id=port['id']) [ 801.175693] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] nova.exception.PortBindingFailed: Binding failed for port faaa0631-b196-4de4-8644-db77250640ab, please check neutron logs for more information. [ 801.175693] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] [ 801.175693] env[62585]: INFO nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Terminating instance [ 801.176922] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Acquiring lock "refresh_cache-54e0a14b-cc4f-4445-8d86-f25cc410d7d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.271686] env[62585]: DEBUG nova.network.neutron [req-73c1ea02-384d-44e7-aae5-defe0240e2e7 req-60b18aa9-6522-472a-8d00-eae1f33932d2 service nova] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.370035] env[62585]: DEBUG nova.network.neutron [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 801.492056] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.558867] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f672949-361e-4460-bad7-a31fe6e05ce4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.567009] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff058e2-3396-4174-bd66-5240dbc57f8a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.598028] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21ebaba-deaa-4ab2-8ae7-4fdb5e992b5f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.605462] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf8c809-58b6-4b50-a093-de6874a41e4f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.618391] env[62585]: DEBUG nova.compute.provider_tree [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.696544] env[62585]: DEBUG nova.network.neutron [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.773883] env[62585]: DEBUG oslo_concurrency.lockutils [req-73c1ea02-384d-44e7-aae5-defe0240e2e7 req-60b18aa9-6522-472a-8d00-eae1f33932d2 service nova] Releasing lock "refresh_cache-54e0a14b-cc4f-4445-8d86-f25cc410d7d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.774387] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Acquired lock "refresh_cache-54e0a14b-cc4f-4445-8d86-f25cc410d7d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.774602] env[62585]: DEBUG nova.network.neutron [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 802.123074] env[62585]: DEBUG nova.scheduler.client.report [None req-368642cb-f91f-462e-8394-cff9fed6edbe 
tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 802.199575] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Releasing lock "refresh_cache-66af981d-2fa4-4ef4-ac39-3f8f78c543af" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.199835] env[62585]: DEBUG nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 802.200033] env[62585]: DEBUG nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 802.200210] env[62585]: DEBUG nova.network.neutron [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 802.217704] env[62585]: DEBUG nova.network.neutron [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.301441] env[62585]: DEBUG nova.network.neutron [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.437956] env[62585]: DEBUG nova.network.neutron [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.628775] env[62585]: DEBUG oslo_concurrency.lockutils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.786s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.629420] env[62585]: ERROR nova.compute.manager [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c7aef8b8-56d1-4819-be4b-9c46bf08752f, please check neutron logs for more information. [ 802.629420] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Traceback (most recent call last): [ 802.629420] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 802.629420] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] self.driver.spawn(context, instance, image_meta, [ 802.629420] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 802.629420] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 802.629420] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 802.629420] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] vm_ref = self.build_virtual_machine(instance, [ 802.629420] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 802.629420] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] vif_infos = vmwarevif.get_vif_info(self._session, [ 802.629420] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 802.629722] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] for vif in network_info: [ 802.629722] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 802.629722] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] return self._sync_wrapper(fn, *args, **kwargs) [ 802.629722] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File 
"/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 802.629722] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] self.wait() [ 802.629722] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 802.629722] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] self[:] = self._gt.wait() [ 802.629722] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 802.629722] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] return self._exit_event.wait() [ 802.629722] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 802.629722] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] current.throw(*self._exc) [ 802.629722] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 802.629722] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] result = function(*args, **kwargs) [ 802.630348] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 802.630348] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] return func(*args, **kwargs) [ 802.630348] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 802.630348] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] raise e [ 802.630348] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 802.630348] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] nwinfo = self.network_api.allocate_for_instance( [ 802.630348] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 802.630348] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] created_port_ids = self._update_ports_for_instance( [ 802.630348] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 802.630348] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] with excutils.save_and_reraise_exception(): [ 802.630348] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 802.630348] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] self.force_reraise() [ 802.630348] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 802.630989] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] raise self.value [ 802.630989] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 802.630989] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] updated_port = self._update_port( [ 802.630989] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 802.630989] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] _ensure_no_port_binding_failure(port) [ 802.630989] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 802.630989] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] raise exception.PortBindingFailed(port_id=port['id']) [ 802.630989] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] nova.exception.PortBindingFailed: Binding failed for port c7aef8b8-56d1-4819-be4b-9c46bf08752f, please check neutron logs for more information. [ 802.630989] env[62585]: ERROR nova.compute.manager [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] [ 802.630989] env[62585]: DEBUG nova.compute.utils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Binding failed for port c7aef8b8-56d1-4819-be4b-9c46bf08752f, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 802.631724] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.810s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.634361] env[62585]: DEBUG nova.compute.manager [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Build of instance c6f0ee10-c5cc-41ad-8b81-f7644921845b was re-scheduled: Binding failed for port c7aef8b8-56d1-4819-be4b-9c46bf08752f, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 802.634769] env[62585]: DEBUG nova.compute.manager [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 802.634987] env[62585]: DEBUG oslo_concurrency.lockutils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "refresh_cache-c6f0ee10-c5cc-41ad-8b81-f7644921845b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.635212] env[62585]: DEBUG oslo_concurrency.lockutils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired lock "refresh_cache-c6f0ee10-c5cc-41ad-8b81-f7644921845b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.635532] env[62585]: DEBUG nova.network.neutron [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 802.645570] env[62585]: DEBUG nova.compute.manager [req-90ad3313-427f-4656-a4dd-ce7b7e1cc7f7 req-7f5c9718-add8-47a6-9cde-d729ef57a44c service nova] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Received event network-vif-deleted-faaa0631-b196-4de4-8644-db77250640ab {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 802.720898] env[62585]: DEBUG nova.network.neutron [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.941335] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Releasing lock "refresh_cache-54e0a14b-cc4f-4445-8d86-f25cc410d7d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.941855] env[62585]: DEBUG nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 802.942105] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 802.942417] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32134deb-3389-4bd5-9b98-31ba5ae81d82 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.951593] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf097fa-2657-4439-8697-cff5c0d712c2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.972861] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 54e0a14b-cc4f-4445-8d86-f25cc410d7d0 could not be found. [ 802.973087] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 802.973269] env[62585]: INFO nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Took 0.03 seconds to destroy the instance on the hypervisor. [ 802.973512] env[62585]: DEBUG oslo.service.loopingcall [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 802.973726] env[62585]: DEBUG nova.compute.manager [-] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 802.973819] env[62585]: DEBUG nova.network.neutron [-] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 802.988524] env[62585]: DEBUG nova.network.neutron [-] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.159197] env[62585]: DEBUG nova.network.neutron [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.226778] env[62585]: INFO nova.compute.manager [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] [instance: 66af981d-2fa4-4ef4-ac39-3f8f78c543af] Took 1.02 seconds to deallocate network for instance. [ 803.290054] env[62585]: DEBUG nova.network.neutron [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.347166] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93166486-1e8c-4cc7-9b92-ff3f98ee9c07 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.354821] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376a0f44-fec3-4ad9-b942-ff0d717950aa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.385274] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a6e88e-f128-486c-9013-e0dc956d1dcd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.392475] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6a44df-71c5-4857-85c3-b5a3fead5adf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.404838] env[62585]: DEBUG nova.compute.provider_tree [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 803.490957] env[62585]: DEBUG nova.network.neutron [-] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.792414] env[62585]: DEBUG oslo_concurrency.lockutils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Releasing lock "refresh_cache-c6f0ee10-c5cc-41ad-8b81-f7644921845b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.792655] env[62585]: DEBUG nova.compute.manager [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 803.792834] env[62585]: DEBUG nova.compute.manager [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 803.793018] env[62585]: DEBUG nova.network.neutron [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 803.815800] env[62585]: DEBUG nova.network.neutron [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.907951] env[62585]: DEBUG nova.scheduler.client.report [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 803.993341] env[62585]: INFO nova.compute.manager [-] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Took 1.02 seconds to deallocate network for instance. 
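
Every failed build in this span bottoms out in the same place: _update_port (nova/network/neutron.py:585) calls _ensure_no_port_binding_failure(port) at line 294, which raises nova.exception.PortBindingFailed, and _update_ports_for_instance re-raises it through excutils.save_and_reraise_exception() so the compute claim is aborted and the instance is re-scheduled. The following is a minimal standalone sketch of that check pattern, not the Nova source itself; the 'binding:vif_type' attribute and the 'binding_failed' value it tests are assumptions based on standard Neutron port attributes.

# Standalone sketch (not the Nova source) of the port-binding check that the
# tracebacks above keep raising from _ensure_no_port_binding_failure().
# Assumption: Neutron marks a failed binding by setting the port attribute
# 'binding:vif_type' to 'binding_failed'.

VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check neutron '
                         'logs for more information.' % port_id)
        self.port_id = port_id


def ensure_no_port_binding_failure(port):
    """Raise if the Neutron port dict reports a failed VIF binding."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


if __name__ == '__main__':
    # Port id taken from the log above; the dict shape is illustrative only.
    bad_port = {'id': 'faaa0631-b196-4de4-8644-db77250640ab',
                'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        ensure_no_port_binding_failure(bad_port)
    except PortBindingFailed as exc:
        print(exc)

Run against a port dict stamped with a failed binding, the sketch prints the same "Binding failed for port ..., please check neutron logs for more information." message that repeats throughout the entries above, which is why each affected instance ends up in the "was re-scheduled" path rather than spawning on the hypervisor.
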
[ 803.995684] env[62585]: DEBUG nova.compute.claims [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 803.995863] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.275244] env[62585]: INFO nova.scheduler.client.report [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Deleted allocations for instance 66af981d-2fa4-4ef4-ac39-3f8f78c543af [ 804.324529] env[62585]: DEBUG nova.network.neutron [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.413317] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.782s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.413945] env[62585]: ERROR nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2d6dc99f-ef27-43fc-89e4-f59b4d1cff9b, please check neutron logs for more information. 
[ 804.413945] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Traceback (most recent call last): [ 804.413945] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 804.413945] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] self.driver.spawn(context, instance, image_meta, [ 804.413945] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 804.413945] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 804.413945] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 804.413945] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] vm_ref = self.build_virtual_machine(instance, [ 804.413945] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 804.413945] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] vif_infos = vmwarevif.get_vif_info(self._session, [ 804.413945] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 804.414336] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] for vif in network_info: [ 804.414336] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 804.414336] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] return self._sync_wrapper(fn, *args, **kwargs) [ 804.414336] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 804.414336] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] self.wait() [ 804.414336] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 804.414336] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] self[:] = self._gt.wait() [ 804.414336] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 804.414336] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] return self._exit_event.wait() [ 804.414336] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 804.414336] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] current.throw(*self._exc) [ 804.414336] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
804.414336] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] result = function(*args, **kwargs) [ 804.414671] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 804.414671] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] return func(*args, **kwargs) [ 804.414671] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 804.414671] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] raise e [ 804.414671] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 804.414671] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] nwinfo = self.network_api.allocate_for_instance( [ 804.414671] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 804.414671] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] created_port_ids = self._update_ports_for_instance( [ 804.414671] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 804.414671] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] with excutils.save_and_reraise_exception(): [ 804.414671] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.414671] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] self.force_reraise() [ 804.414671] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.415022] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] raise self.value [ 804.415022] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 804.415022] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] updated_port = self._update_port( [ 804.415022] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.415022] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] _ensure_no_port_binding_failure(port) [ 804.415022] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.415022] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] raise exception.PortBindingFailed(port_id=port['id']) [ 804.415022] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] nova.exception.PortBindingFailed: Binding failed for 
port 2d6dc99f-ef27-43fc-89e4-f59b4d1cff9b, please check neutron logs for more information. [ 804.415022] env[62585]: ERROR nova.compute.manager [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] [ 804.415022] env[62585]: DEBUG nova.compute.utils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Binding failed for port 2d6dc99f-ef27-43fc-89e4-f59b4d1cff9b, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 804.416396] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.289s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.420648] env[62585]: DEBUG nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Build of instance 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3 was re-scheduled: Binding failed for port 2d6dc99f-ef27-43fc-89e4-f59b4d1cff9b, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 804.421099] env[62585]: DEBUG nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 804.421331] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Acquiring lock "refresh_cache-5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.421474] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Acquired lock "refresh_cache-5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.421628] env[62585]: DEBUG nova.network.neutron [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 804.786459] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a16e75dc-dc24-4a2d-8c24-47fbed691295 tempest-ServerActionsTestOtherA-448083351 tempest-ServerActionsTestOtherA-448083351-project-member] Lock "66af981d-2fa4-4ef4-ac39-3f8f78c543af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.998s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.828088] env[62585]: INFO nova.compute.manager [None 
req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: c6f0ee10-c5cc-41ad-8b81-f7644921845b] Took 1.03 seconds to deallocate network for instance. [ 804.941271] env[62585]: DEBUG nova.network.neutron [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.032110] env[62585]: DEBUG nova.network.neutron [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.131735] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669f7963-eaeb-4bbe-922f-c1700a47cda1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.141196] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c683b65-1761-4c00-929e-a1f18437c0e2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.175630] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69111c18-5eff-4335-b717-baca2f0da9d7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.185071] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7850cf0e-20f7-4849-89ff-ba938cca7331 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.198865] env[62585]: DEBUG nova.compute.provider_tree [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.289707] env[62585]: DEBUG nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 805.535383] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Releasing lock "refresh_cache-5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.535493] env[62585]: DEBUG nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 805.535655] env[62585]: DEBUG nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 805.535818] env[62585]: DEBUG nova.network.neutron [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 805.560166] env[62585]: DEBUG nova.network.neutron [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.702096] env[62585]: DEBUG nova.scheduler.client.report [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 805.808273] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.867298] env[62585]: INFO nova.scheduler.client.report [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Deleted allocations for instance c6f0ee10-c5cc-41ad-8b81-f7644921845b [ 806.040067] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "abf4a205-fcee-46e4-85b6-10a452cc0312" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.040300] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "abf4a205-fcee-46e4-85b6-10a452cc0312" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.063709] env[62585]: DEBUG nova.network.neutron [None 
req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.209303] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.793s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.209934] env[62585]: ERROR nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cedac717-629f-4a55-902e-4a850482fb32, please check neutron logs for more information. [ 806.209934] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Traceback (most recent call last): [ 806.209934] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 806.209934] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] self.driver.spawn(context, instance, image_meta, [ 806.209934] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 806.209934] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 806.209934] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 806.209934] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] vm_ref = self.build_virtual_machine(instance, [ 806.209934] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 806.209934] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] vif_infos = vmwarevif.get_vif_info(self._session, [ 806.209934] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 806.210355] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] for vif in network_info: [ 806.210355] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 806.210355] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] return self._sync_wrapper(fn, *args, **kwargs) [ 806.210355] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 806.210355] env[62585]: ERROR nova.compute.manager [instance: 
8a9daa60-e93a-4276-bf23-652ae7b0618b] self.wait() [ 806.210355] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 806.210355] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] self[:] = self._gt.wait() [ 806.210355] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 806.210355] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] return self._exit_event.wait() [ 806.210355] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 806.210355] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] current.throw(*self._exc) [ 806.210355] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 806.210355] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] result = function(*args, **kwargs) [ 806.211119] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 806.211119] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] return func(*args, **kwargs) [ 806.211119] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 806.211119] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] raise e [ 806.211119] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 806.211119] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] nwinfo = self.network_api.allocate_for_instance( [ 806.211119] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 806.211119] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] created_port_ids = self._update_ports_for_instance( [ 806.211119] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 806.211119] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] with excutils.save_and_reraise_exception(): [ 806.211119] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 806.211119] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] self.force_reraise() [ 806.211119] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 806.211771] env[62585]: ERROR nova.compute.manager 
[instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] raise self.value [ 806.211771] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 806.211771] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] updated_port = self._update_port( [ 806.211771] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 806.211771] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] _ensure_no_port_binding_failure(port) [ 806.211771] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 806.211771] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] raise exception.PortBindingFailed(port_id=port['id']) [ 806.211771] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] nova.exception.PortBindingFailed: Binding failed for port cedac717-629f-4a55-902e-4a850482fb32, please check neutron logs for more information. [ 806.211771] env[62585]: ERROR nova.compute.manager [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] [ 806.211771] env[62585]: DEBUG nova.compute.utils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Binding failed for port cedac717-629f-4a55-902e-4a850482fb32, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 806.212992] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Build of instance 8a9daa60-e93a-4276-bf23-652ae7b0618b was re-scheduled: Binding failed for port cedac717-629f-4a55-902e-4a850482fb32, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 806.213642] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 806.213784] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "refresh_cache-8a9daa60-e93a-4276-bf23-652ae7b0618b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.213963] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquired lock "refresh_cache-8a9daa60-e93a-4276-bf23-652ae7b0618b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.214147] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 806.217744] env[62585]: DEBUG oslo_concurrency.lockutils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.824s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.219812] env[62585]: INFO nova.compute.claims [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 806.379900] env[62585]: DEBUG oslo_concurrency.lockutils [None req-368642cb-f91f-462e-8394-cff9fed6edbe tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "c6f0ee10-c5cc-41ad-8b81-f7644921845b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 120.807s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.566707] env[62585]: INFO nova.compute.manager [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] [instance: 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3] Took 1.03 seconds to deallocate network for instance. [ 806.749717] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.882279] env[62585]: DEBUG nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 807.061354] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.403172] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.420052] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a04f99e-6c3d-44f9-b7a3-f51499a0f0ec {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.427808] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3fd82b-e939-4683-a5f8-9f48775da4b4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.457808] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756500e1-f67f-499d-9874-7d6f4437447f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.465049] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e3013c-d7b8-495f-b785-e5c300750b8e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.477585] env[62585]: DEBUG nova.compute.provider_tree [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.563684] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Releasing lock "refresh_cache-8a9daa60-e93a-4276-bf23-652ae7b0618b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.563939] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 807.564146] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 807.564393] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 807.579640] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.594466] env[62585]: INFO nova.scheduler.client.report [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Deleted allocations for instance 5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3 [ 807.758211] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "679380d4-5b96-4c30-bac9-f7163f19c609" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.758211] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "679380d4-5b96-4c30-bac9-f7163f19c609" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.981370] env[62585]: DEBUG nova.scheduler.client.report [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 808.082302] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.101485] env[62585]: DEBUG 
oslo_concurrency.lockutils [None req-1898b728-5350-47b4-a6c1-01e5f5f574de tempest-ServersTestJSON-556798697 tempest-ServersTestJSON-556798697-project-member] Lock "5cb1710d-cf66-4ea4-ad6d-6fa1b2e1ffd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.265s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.486520] env[62585]: DEBUG oslo_concurrency.lockutils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.268s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.486884] env[62585]: DEBUG nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 808.489937] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.322s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.491335] env[62585]: INFO nova.compute.claims [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 808.585629] env[62585]: INFO nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 8a9daa60-e93a-4276-bf23-652ae7b0618b] Took 1.02 seconds to deallocate network for instance. [ 808.603605] env[62585]: DEBUG nova.compute.manager [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 808.998489] env[62585]: DEBUG nova.compute.utils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 809.007415] env[62585]: DEBUG nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 809.007415] env[62585]: DEBUG nova.network.neutron [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 809.131224] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.177197] env[62585]: DEBUG nova.policy [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f26abf4eaa71482b8fd3c6425a9c683d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48929b5f0c2c41ddade223ab57002fc4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 809.448479] env[62585]: DEBUG nova.network.neutron [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Successfully created port: 66d937d1-8514-49f1-9152-3881137092dd {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 809.503575] env[62585]: DEBUG nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 809.618638] env[62585]: INFO nova.scheduler.client.report [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Deleted allocations for instance 8a9daa60-e93a-4276-bf23-652ae7b0618b [ 809.772400] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ed665d-cfe8-4502-b40e-79c2e8dde040 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.780029] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813d0ca4-dcfd-4361-a547-411e200165c1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.809033] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d818c465-250e-4787-a02f-787f72ad00b8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.816459] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862b786c-2807-46fe-a43d-daafb21e547f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.830318] env[62585]: DEBUG nova.compute.provider_tree [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.130408] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "8a9daa60-e93a-4276-bf23-652ae7b0618b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.024s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.198149] env[62585]: DEBUG nova.compute.manager [req-5a1582d9-9472-4e35-9eff-262157941da1 req-7bf36e79-4956-46dd-ab78-221547b598a9 service nova] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Received event network-changed-66d937d1-8514-49f1-9152-3881137092dd {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 810.198149] env[62585]: DEBUG nova.compute.manager [req-5a1582d9-9472-4e35-9eff-262157941da1 req-7bf36e79-4956-46dd-ab78-221547b598a9 service nova] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Refreshing instance network info cache due to event network-changed-66d937d1-8514-49f1-9152-3881137092dd. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 810.198149] env[62585]: DEBUG oslo_concurrency.lockutils [req-5a1582d9-9472-4e35-9eff-262157941da1 req-7bf36e79-4956-46dd-ab78-221547b598a9 service nova] Acquiring lock "refresh_cache-aed35d7d-f826-4601-aa4e-1d1dccd51d3a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.198149] env[62585]: DEBUG oslo_concurrency.lockutils [req-5a1582d9-9472-4e35-9eff-262157941da1 req-7bf36e79-4956-46dd-ab78-221547b598a9 service nova] Acquired lock "refresh_cache-aed35d7d-f826-4601-aa4e-1d1dccd51d3a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.199196] env[62585]: DEBUG nova.network.neutron [req-5a1582d9-9472-4e35-9eff-262157941da1 req-7bf36e79-4956-46dd-ab78-221547b598a9 service nova] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Refreshing network info cache for port 66d937d1-8514-49f1-9152-3881137092dd {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 810.334179] env[62585]: DEBUG nova.scheduler.client.report [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 810.435561] env[62585]: ERROR nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 66d937d1-8514-49f1-9152-3881137092dd, please check neutron logs for more information. 
[ 810.435561] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 810.435561] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 810.435561] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 810.435561] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 810.435561] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 810.435561] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 810.435561] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 810.435561] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.435561] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 810.435561] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.435561] env[62585]: ERROR nova.compute.manager raise self.value [ 810.435561] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 810.435561] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 810.435561] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.435561] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 810.436371] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 810.436371] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 810.436371] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 66d937d1-8514-49f1-9152-3881137092dd, please check neutron logs for more information. 
[ 810.436371] env[62585]: ERROR nova.compute.manager [ 810.436371] env[62585]: Traceback (most recent call last): [ 810.436371] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 810.436371] env[62585]: listener.cb(fileno) [ 810.436371] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 810.436371] env[62585]: result = function(*args, **kwargs) [ 810.436371] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 810.436371] env[62585]: return func(*args, **kwargs) [ 810.436371] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 810.436371] env[62585]: raise e [ 810.436371] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 810.436371] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 810.436371] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 810.436371] env[62585]: created_port_ids = self._update_ports_for_instance( [ 810.436371] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 810.436371] env[62585]: with excutils.save_and_reraise_exception(): [ 810.436371] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.436371] env[62585]: self.force_reraise() [ 810.436371] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.436371] env[62585]: raise self.value [ 810.436371] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 810.436371] env[62585]: updated_port = self._update_port( [ 810.436371] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.436371] env[62585]: _ensure_no_port_binding_failure(port) [ 810.436371] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 810.436371] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 810.437155] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 66d937d1-8514-49f1-9152-3881137092dd, please check neutron logs for more information. [ 810.437155] env[62585]: Removing descriptor: 15 [ 810.517433] env[62585]: DEBUG nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 810.545201] env[62585]: DEBUG nova.virt.hardware [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 810.545462] env[62585]: DEBUG nova.virt.hardware [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 810.545620] env[62585]: DEBUG nova.virt.hardware [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 810.545802] env[62585]: DEBUG nova.virt.hardware [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 810.545946] env[62585]: DEBUG nova.virt.hardware [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 810.546114] env[62585]: DEBUG nova.virt.hardware [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 810.546322] env[62585]: DEBUG nova.virt.hardware [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 810.546479] env[62585]: DEBUG nova.virt.hardware [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 810.546645] 
env[62585]: DEBUG nova.virt.hardware [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 810.546805] env[62585]: DEBUG nova.virt.hardware [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 810.546976] env[62585]: DEBUG nova.virt.hardware [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 810.547910] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7a6d53-745d-4074-b06a-6d812f54dc06 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.560053] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fcdb4a3-4c51-496f-be50-4d21f62b113f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.571665] env[62585]: ERROR nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 66d937d1-8514-49f1-9152-3881137092dd, please check neutron logs for more information. 
[ 810.571665] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Traceback (most recent call last): [ 810.571665] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 810.571665] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] yield resources [ 810.571665] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 810.571665] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] self.driver.spawn(context, instance, image_meta, [ 810.571665] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 810.571665] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 810.571665] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 810.571665] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] vm_ref = self.build_virtual_machine(instance, [ 810.571665] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 810.571953] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] vif_infos = vmwarevif.get_vif_info(self._session, [ 810.571953] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 810.571953] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] for vif in network_info: [ 810.571953] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 810.571953] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] return self._sync_wrapper(fn, *args, **kwargs) [ 810.571953] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 810.571953] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] self.wait() [ 810.571953] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 810.571953] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] self[:] = self._gt.wait() [ 810.571953] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 810.571953] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] return self._exit_event.wait() [ 810.571953] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 810.571953] env[62585]: ERROR 
nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] current.throw(*self._exc) [ 810.572257] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 810.572257] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] result = function(*args, **kwargs) [ 810.572257] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 810.572257] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] return func(*args, **kwargs) [ 810.572257] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 810.572257] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] raise e [ 810.572257] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 810.572257] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] nwinfo = self.network_api.allocate_for_instance( [ 810.572257] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 810.572257] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] created_port_ids = self._update_ports_for_instance( [ 810.572257] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 810.572257] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] with excutils.save_and_reraise_exception(): [ 810.572257] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.572599] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] self.force_reraise() [ 810.572599] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.572599] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] raise self.value [ 810.572599] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 810.572599] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] updated_port = self._update_port( [ 810.572599] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.572599] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] _ensure_no_port_binding_failure(port) [ 810.572599] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
810.572599] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] raise exception.PortBindingFailed(port_id=port['id']) [ 810.572599] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] nova.exception.PortBindingFailed: Binding failed for port 66d937d1-8514-49f1-9152-3881137092dd, please check neutron logs for more information. [ 810.572599] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] [ 810.572599] env[62585]: INFO nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Terminating instance [ 810.574149] env[62585]: DEBUG oslo_concurrency.lockutils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "refresh_cache-aed35d7d-f826-4601-aa4e-1d1dccd51d3a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.633933] env[62585]: DEBUG nova.compute.manager [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 810.816581] env[62585]: DEBUG nova.network.neutron [req-5a1582d9-9472-4e35-9eff-262157941da1 req-7bf36e79-4956-46dd-ab78-221547b598a9 service nova] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 810.839559] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.350s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.840048] env[62585]: DEBUG nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 810.843553] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.262s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.913696] env[62585]: DEBUG nova.network.neutron [req-5a1582d9-9472-4e35-9eff-262157941da1 req-7bf36e79-4956-46dd-ab78-221547b598a9 service nova] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.155515] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.348781] env[62585]: DEBUG nova.compute.utils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 811.350233] env[62585]: DEBUG nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 811.350405] env[62585]: DEBUG nova.network.neutron [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 811.408789] env[62585]: DEBUG nova.policy [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0cbaeae4fa1e4dc996a4d8a364ea0dae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44e32d293ad64cd499926859857e023e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 811.416718] env[62585]: DEBUG oslo_concurrency.lockutils [req-5a1582d9-9472-4e35-9eff-262157941da1 req-7bf36e79-4956-46dd-ab78-221547b598a9 service nova] Releasing lock "refresh_cache-aed35d7d-f826-4601-aa4e-1d1dccd51d3a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.417411] env[62585]: DEBUG oslo_concurrency.lockutils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "refresh_cache-aed35d7d-f826-4601-aa4e-1d1dccd51d3a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.417638] env[62585]: DEBUG nova.network.neutron [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 811.494297] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc9c4f9-deab-4f0a-b67e-3ff0bf77e9a0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.501820] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf722a8-d448-4dc9-9f42-c4dbd00c8f36 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.531314] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a2fa17-c17c-4f9b-91ec-48c10b4a0c70 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.537778] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93425e76-4f34-49c3-857f-27b7d32bbf77 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.551148] env[62585]: DEBUG nova.compute.provider_tree [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 
tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 811.671712] env[62585]: DEBUG nova.network.neutron [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Successfully created port: 52853daf-8aeb-4e00-b8de-4d44b42e6529 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 811.855575] env[62585]: DEBUG nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 811.936640] env[62585]: DEBUG nova.network.neutron [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 812.013946] env[62585]: DEBUG nova.network.neutron [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.054354] env[62585]: DEBUG nova.scheduler.client.report [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 812.224207] env[62585]: DEBUG nova.compute.manager [req-c74daa2f-2bc9-437f-bb4f-9facbd43196e req-d16d1663-5b98-4e45-9cf1-249b2a97a36d service nova] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Received event network-vif-deleted-66d937d1-8514-49f1-9152-3881137092dd {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 812.516468] env[62585]: DEBUG oslo_concurrency.lockutils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "refresh_cache-aed35d7d-f826-4601-aa4e-1d1dccd51d3a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.516899] env[62585]: DEBUG nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Start destroying the 
instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 812.517104] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 812.517411] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cce2c506-2abf-4dc4-a037-516534e844ce {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.523633] env[62585]: ERROR nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 52853daf-8aeb-4e00-b8de-4d44b42e6529, please check neutron logs for more information. [ 812.523633] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 812.523633] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 812.523633] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 812.523633] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 812.523633] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 812.523633] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 812.523633] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 812.523633] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 812.523633] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 812.523633] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 812.523633] env[62585]: ERROR nova.compute.manager raise self.value [ 812.523633] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 812.523633] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 812.523633] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 812.523633] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 812.524138] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 812.524138] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 812.524138] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 52853daf-8aeb-4e00-b8de-4d44b42e6529, please check neutron logs for more information. 
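
Every failure above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294 in these tracebacks) raising PortBindingFailed. A minimal, self-contained sketch of that check follows; the 'binding:vif_type' attribute name and the 'binding_failed' sentinel are assumptions based on standard Neutron port-binding semantics, not taken from this log, and the exception class here is a stand-in for nova.exception.PortBindingFailed.

    # Sketch of the check behind the PortBindingFailed errors in this log.
    # Assumption: a failed Neutron binding is reported via the port's
    # 'binding:vif_type' attribute being set to 'binding_failed'.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")
            self.port_id = port_id

    def ensure_no_port_binding_failure(port: dict) -> None:
        """Raise PortBindingFailed if Neutron could not bind the port."""
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    if __name__ == "__main__":
        # A port in the state that triggers the errors seen above.
        failed_port = {'id': '66d937d1-8514-49f1-9152-3881137092dd',
                       'binding:vif_type': 'binding_failed'}
        try:
            ensure_no_port_binding_failure(failed_port)
        except PortBindingFailed as exc:
            print(exc)
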
[ 812.524138] env[62585]: ERROR nova.compute.manager [ 812.524138] env[62585]: Traceback (most recent call last): [ 812.524138] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 812.524138] env[62585]: listener.cb(fileno) [ 812.524138] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 812.524138] env[62585]: result = function(*args, **kwargs) [ 812.524138] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 812.524138] env[62585]: return func(*args, **kwargs) [ 812.524138] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 812.524138] env[62585]: raise e [ 812.524138] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 812.524138] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 812.524138] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 812.524138] env[62585]: created_port_ids = self._update_ports_for_instance( [ 812.524138] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 812.524138] env[62585]: with excutils.save_and_reraise_exception(): [ 812.524138] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 812.524138] env[62585]: self.force_reraise() [ 812.524138] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 812.524138] env[62585]: raise self.value [ 812.524138] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 812.524138] env[62585]: updated_port = self._update_port( [ 812.524138] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 812.524138] env[62585]: _ensure_no_port_binding_failure(port) [ 812.524138] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 812.524138] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 812.524884] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 52853daf-8aeb-4e00-b8de-4d44b42e6529, please check neutron logs for more information. [ 812.524884] env[62585]: Removing descriptor: 15 [ 812.527566] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61650cde-593e-45e8-86fb-214e498efd48 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.550499] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aed35d7d-f826-4601-aa4e-1d1dccd51d3a could not be found. 
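
The __exit__/force_reraise frames in the tracebacks above come from oslo_utils.excutils.save_and_reraise_exception: run cleanup after a failure, then re-raise the original exception unchanged. A minimal sketch of that pattern, assuming oslo.utils is installed; the cleanup body is a hypothetical stand-in, not Nova's port-cleanup code.

    # Sketch of the save_and_reraise_exception pattern visible in the
    # tracebacks above: the context manager captures the active exception
    # on entry and re-raises it on exit, so cleanup cannot mask the error.
    from oslo_utils import excutils

    def update_ports():
        try:
            raise RuntimeError("port update failed")  # stand-in for the Neutron call
        except Exception:
            with excutils.save_and_reraise_exception():
                print("cleaning up half-created ports")  # cleanup runs, error survives

    if __name__ == "__main__":
        try:
            update_ports()
        except RuntimeError as exc:
            print(f"re-raised: {exc}")
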
[ 812.550499] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 812.550499] env[62585]: INFO nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 812.550499] env[62585]: DEBUG oslo.service.loopingcall [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 812.550499] env[62585]: DEBUG nova.compute.manager [-] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 812.550499] env[62585]: DEBUG nova.network.neutron [-] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 812.559471] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.716s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.560027] env[62585]: ERROR nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d7391688-1eaa-4b3a-9fdb-9e1c117df433, please check neutron logs for more information. 
[ 812.560027] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Traceback (most recent call last): [ 812.560027] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 812.560027] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] self.driver.spawn(context, instance, image_meta, [ 812.560027] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 812.560027] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 812.560027] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 812.560027] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] vm_ref = self.build_virtual_machine(instance, [ 812.560027] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 812.560027] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] vif_infos = vmwarevif.get_vif_info(self._session, [ 812.560027] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 812.560368] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] for vif in network_info: [ 812.560368] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 812.560368] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] return self._sync_wrapper(fn, *args, **kwargs) [ 812.560368] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 812.560368] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] self.wait() [ 812.560368] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 812.560368] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] self[:] = self._gt.wait() [ 812.560368] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 812.560368] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] return self._exit_event.wait() [ 812.560368] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 812.560368] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] current.throw(*self._exc) [ 812.560368] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
812.560368] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] result = function(*args, **kwargs) [ 812.560881] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 812.560881] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] return func(*args, **kwargs) [ 812.560881] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 812.560881] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] raise e [ 812.560881] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 812.560881] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] nwinfo = self.network_api.allocate_for_instance( [ 812.560881] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 812.560881] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] created_port_ids = self._update_ports_for_instance( [ 812.560881] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 812.560881] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] with excutils.save_and_reraise_exception(): [ 812.560881] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 812.560881] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] self.force_reraise() [ 812.560881] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 812.561249] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] raise self.value [ 812.561249] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 812.561249] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] updated_port = self._update_port( [ 812.561249] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 812.561249] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] _ensure_no_port_binding_failure(port) [ 812.561249] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 812.561249] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] raise exception.PortBindingFailed(port_id=port['id']) [ 812.561249] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] nova.exception.PortBindingFailed: Binding failed for 
port d7391688-1eaa-4b3a-9fdb-9e1c117df433, please check neutron logs for more information. [ 812.561249] env[62585]: ERROR nova.compute.manager [instance: 4dfc00d9-64db-439e-baee-041562f7354b] [ 812.561249] env[62585]: DEBUG nova.compute.utils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Binding failed for port d7391688-1eaa-4b3a-9fdb-9e1c117df433, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 812.561840] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.681s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.564571] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Build of instance 4dfc00d9-64db-439e-baee-041562f7354b was re-scheduled: Binding failed for port d7391688-1eaa-4b3a-9fdb-9e1c117df433, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 812.564969] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 812.565198] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "refresh_cache-4dfc00d9-64db-439e-baee-041562f7354b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.565343] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquired lock "refresh_cache-4dfc00d9-64db-439e-baee-041562f7354b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.565499] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 812.566739] env[62585]: DEBUG nova.network.neutron [-] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 812.867296] env[62585]: DEBUG nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 812.892680] env[62585]: DEBUG nova.virt.hardware [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 812.892931] env[62585]: DEBUG nova.virt.hardware [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 812.893099] env[62585]: DEBUG nova.virt.hardware [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 812.893288] env[62585]: DEBUG nova.virt.hardware [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 812.893432] env[62585]: DEBUG nova.virt.hardware [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 812.893573] env[62585]: DEBUG nova.virt.hardware [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 812.893769] env[62585]: DEBUG nova.virt.hardware [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 812.893925] env[62585]: DEBUG nova.virt.hardware [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 812.894096] env[62585]: DEBUG nova.virt.hardware [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 812.894257] env[62585]: DEBUG nova.virt.hardware [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 812.894427] env[62585]: DEBUG nova.virt.hardware [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 812.895277] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0642dff9-193c-4ad6-987d-2b5a67dc32ac {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.902714] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf2e758-cc88-4c60-8eb4-d9b65e09a9ae {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.915639] env[62585]: ERROR nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 52853daf-8aeb-4e00-b8de-4d44b42e6529, please check neutron logs for more information. 
[ 812.915639] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Traceback (most recent call last): [ 812.915639] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 812.915639] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] yield resources [ 812.915639] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 812.915639] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] self.driver.spawn(context, instance, image_meta, [ 812.915639] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 812.915639] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 812.915639] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 812.915639] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] vm_ref = self.build_virtual_machine(instance, [ 812.915639] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 812.916043] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] vif_infos = vmwarevif.get_vif_info(self._session, [ 812.916043] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 812.916043] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] for vif in network_info: [ 812.916043] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 812.916043] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] return self._sync_wrapper(fn, *args, **kwargs) [ 812.916043] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 812.916043] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] self.wait() [ 812.916043] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 812.916043] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] self[:] = self._gt.wait() [ 812.916043] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 812.916043] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] return self._exit_event.wait() [ 812.916043] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 812.916043] env[62585]: ERROR 
nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] current.throw(*self._exc) [ 812.916410] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 812.916410] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] result = function(*args, **kwargs) [ 812.916410] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 812.916410] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] return func(*args, **kwargs) [ 812.916410] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 812.916410] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] raise e [ 812.916410] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 812.916410] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] nwinfo = self.network_api.allocate_for_instance( [ 812.916410] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 812.916410] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] created_port_ids = self._update_ports_for_instance( [ 812.916410] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 812.916410] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] with excutils.save_and_reraise_exception(): [ 812.916410] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 812.916773] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] self.force_reraise() [ 812.916773] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 812.916773] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] raise self.value [ 812.916773] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 812.916773] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] updated_port = self._update_port( [ 812.916773] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 812.916773] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] _ensure_no_port_binding_failure(port) [ 812.916773] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
812.916773] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] raise exception.PortBindingFailed(port_id=port['id']) [ 812.916773] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] nova.exception.PortBindingFailed: Binding failed for port 52853daf-8aeb-4e00-b8de-4d44b42e6529, please check neutron logs for more information. [ 812.916773] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] [ 812.916773] env[62585]: INFO nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Terminating instance [ 812.917846] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "refresh_cache-6483148a-b53d-46b9-8926-07b628f2ea3b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.918008] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquired lock "refresh_cache-6483148a-b53d-46b9-8926-07b628f2ea3b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.918176] env[62585]: DEBUG nova.network.neutron [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 813.073390] env[62585]: DEBUG nova.network.neutron [-] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.087583] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 813.175172] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.220637] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce3ccaef-8c71-4c19-9a04-644ad708c65f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.228152] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8bfd1f-8470-434f-85f4-de5064d975ea {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.258349] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2a4db8-ff38-4bd8-918a-ce8319d3ff1e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.265312] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0c1e7d-e80f-41bb-aff0-4cb49675d0e3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.278365] env[62585]: DEBUG nova.compute.provider_tree [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.435666] env[62585]: DEBUG nova.network.neutron [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 813.523490] env[62585]: DEBUG nova.network.neutron [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.575548] env[62585]: INFO nova.compute.manager [-] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Took 1.03 seconds to deallocate network for instance. 
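
The repeated "Acquiring lock ... by ...", "acquired ... waited Ns" and "released ... held Ns" DEBUG lines in this log are emitted by the lockutils wrapper cited in them (inner in oslo_concurrency/lockutils.py). A minimal sketch of the same pattern, assuming oslo.concurrency is installed; the function body is only a stand-in for the real claim/abort work.

    # Sketch of the lock pattern behind the compute_resources lock lines above.
    # The synchronized decorator's wrapper logs the acquire/wait/held timings.
    import logging
    import time

    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized("compute_resources")
    def instance_claim():
        time.sleep(0.1)  # stand-in for the real resource-tracker work

    if __name__ == "__main__":
        instance_claim()
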
[ 813.577916] env[62585]: DEBUG nova.compute.claims [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 813.578135] env[62585]: DEBUG oslo_concurrency.lockutils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.677557] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Releasing lock "refresh_cache-4dfc00d9-64db-439e-baee-041562f7354b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.677794] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 813.677979] env[62585]: DEBUG nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 813.678168] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 813.693251] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 813.781726] env[62585]: DEBUG nova.scheduler.client.report [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 814.026594] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Releasing lock "refresh_cache-6483148a-b53d-46b9-8926-07b628f2ea3b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.027039] env[62585]: DEBUG nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 814.027249] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 814.027577] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-918969d6-99bd-46f6-a497-cc16e8e1ecd8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.036732] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf43a17-14d5-41c0-98c7-a7a55de0d9a9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.058045] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6483148a-b53d-46b9-8926-07b628f2ea3b could not be found. [ 814.058363] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 814.058553] env[62585]: INFO nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Took 0.03 seconds to destroy the instance on the hypervisor. 
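The record that follows drives network deallocation through oslo.service's looping-call machinery ("Waiting for function ... _deallocate_network_with_retries to return", logged from oslo_service/loopingcall.py). As a rough illustration of that mechanism, here is a minimal sketch using FixedIntervalLoopingCall from the same module; the retry wrapper named in the log is a different helper in that module, and the _poll_deallocation worker below is purely hypothetical.

from oslo_service import loopingcall

def _poll_deallocation(state):
    # Illustrative worker: pretend each tick checks whether network
    # deallocation has finished, and stop the loop once it has.
    state['ticks'] += 1
    if state['ticks'] >= 3:
        raise loopingcall.LoopingCallDone(retvalue=True)

state = {'ticks': 0}
timer = loopingcall.FixedIntervalLoopingCall(_poll_deallocation, state)
# start() returns an event; wait() blocks until LoopingCallDone is raised
# and then returns its retvalue.
result = timer.start(interval=0.1).wait()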
[ 814.058789] env[62585]: DEBUG oslo.service.loopingcall [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 814.059027] env[62585]: DEBUG nova.compute.manager [-] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 814.059158] env[62585]: DEBUG nova.network.neutron [-] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 814.072145] env[62585]: DEBUG nova.network.neutron [-] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 814.196356] env[62585]: DEBUG nova.network.neutron [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.279177] env[62585]: DEBUG nova.compute.manager [req-8a456bc8-27ae-4f63-8f57-237fbfc68e8c req-93ce1224-ffaa-4a13-90ef-55a89139215a service nova] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Received event network-changed-52853daf-8aeb-4e00-b8de-4d44b42e6529 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 814.279386] env[62585]: DEBUG nova.compute.manager [req-8a456bc8-27ae-4f63-8f57-237fbfc68e8c req-93ce1224-ffaa-4a13-90ef-55a89139215a service nova] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Refreshing instance network info cache due to event network-changed-52853daf-8aeb-4e00-b8de-4d44b42e6529. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 814.279600] env[62585]: DEBUG oslo_concurrency.lockutils [req-8a456bc8-27ae-4f63-8f57-237fbfc68e8c req-93ce1224-ffaa-4a13-90ef-55a89139215a service nova] Acquiring lock "refresh_cache-6483148a-b53d-46b9-8926-07b628f2ea3b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.279776] env[62585]: DEBUG oslo_concurrency.lockutils [req-8a456bc8-27ae-4f63-8f57-237fbfc68e8c req-93ce1224-ffaa-4a13-90ef-55a89139215a service nova] Acquired lock "refresh_cache-6483148a-b53d-46b9-8926-07b628f2ea3b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.279901] env[62585]: DEBUG nova.network.neutron [req-8a456bc8-27ae-4f63-8f57-237fbfc68e8c req-93ce1224-ffaa-4a13-90ef-55a89139215a service nova] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Refreshing network info cache for port 52853daf-8aeb-4e00-b8de-4d44b42e6529 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 814.286154] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.724s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.286783] env[62585]: ERROR nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7a9e3ef3-6668-4c0a-a02e-5e5731832c37, please check neutron logs for more information. 
[ 814.286783] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Traceback (most recent call last): [ 814.286783] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 814.286783] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] self.driver.spawn(context, instance, image_meta, [ 814.286783] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 814.286783] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] self._vmops.spawn(context, instance, image_meta, injected_files, [ 814.286783] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 814.286783] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] vm_ref = self.build_virtual_machine(instance, [ 814.286783] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 814.286783] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] vif_infos = vmwarevif.get_vif_info(self._session, [ 814.286783] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 814.287128] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] for vif in network_info: [ 814.287128] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 814.287128] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] return self._sync_wrapper(fn, *args, **kwargs) [ 814.287128] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 814.287128] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] self.wait() [ 814.287128] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 814.287128] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] self[:] = self._gt.wait() [ 814.287128] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 814.287128] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] return self._exit_event.wait() [ 814.287128] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 814.287128] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] result = hub.switch() [ 814.287128] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
814.287128] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] return self.greenlet.switch() [ 814.287638] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 814.287638] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] result = function(*args, **kwargs) [ 814.287638] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 814.287638] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] return func(*args, **kwargs) [ 814.287638] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 814.287638] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] raise e [ 814.287638] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 814.287638] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] nwinfo = self.network_api.allocate_for_instance( [ 814.287638] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 814.287638] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] created_port_ids = self._update_ports_for_instance( [ 814.287638] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 814.287638] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] with excutils.save_and_reraise_exception(): [ 814.287638] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 814.287985] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] self.force_reraise() [ 814.287985] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 814.287985] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] raise self.value [ 814.287985] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 814.287985] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] updated_port = self._update_port( [ 814.287985] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 814.287985] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] _ensure_no_port_binding_failure(port) [ 814.287985] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 814.287985] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] raise exception.PortBindingFailed(port_id=port['id']) [ 814.287985] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] nova.exception.PortBindingFailed: Binding failed for port 7a9e3ef3-6668-4c0a-a02e-5e5731832c37, please check neutron logs for more information. [ 814.287985] env[62585]: ERROR nova.compute.manager [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] [ 814.288352] env[62585]: DEBUG nova.compute.utils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Binding failed for port 7a9e3ef3-6668-4c0a-a02e-5e5731832c37, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 814.289239] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.797s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.290678] env[62585]: INFO nova.compute.claims [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 814.293500] env[62585]: DEBUG nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Build of instance e4bd743b-b3a6-4872-9e33-a0183b976292 was re-scheduled: Binding failed for port 7a9e3ef3-6668-4c0a-a02e-5e5731832c37, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 814.293943] env[62585]: DEBUG nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 814.294181] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquiring lock "refresh_cache-e4bd743b-b3a6-4872-9e33-a0183b976292" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.294328] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquired lock "refresh_cache-e4bd743b-b3a6-4872-9e33-a0183b976292" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.294485] env[62585]: DEBUG nova.network.neutron [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 814.574090] env[62585]: DEBUG nova.network.neutron [-] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.698725] env[62585]: INFO nova.compute.manager [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 4dfc00d9-64db-439e-baee-041562f7354b] Took 1.02 seconds to deallocate network for instance. [ 814.799407] env[62585]: DEBUG nova.network.neutron [req-8a456bc8-27ae-4f63-8f57-237fbfc68e8c req-93ce1224-ffaa-4a13-90ef-55a89139215a service nova] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 814.815562] env[62585]: DEBUG nova.network.neutron [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 814.868779] env[62585]: DEBUG nova.network.neutron [req-8a456bc8-27ae-4f63-8f57-237fbfc68e8c req-93ce1224-ffaa-4a13-90ef-55a89139215a service nova] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.887584] env[62585]: DEBUG nova.network.neutron [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.076515] env[62585]: INFO nova.compute.manager [-] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Took 1.02 seconds to deallocate network for instance. [ 815.078738] env[62585]: DEBUG nova.compute.claims [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 815.078914] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.371149] env[62585]: DEBUG oslo_concurrency.lockutils [req-8a456bc8-27ae-4f63-8f57-237fbfc68e8c req-93ce1224-ffaa-4a13-90ef-55a89139215a service nova] Releasing lock "refresh_cache-6483148a-b53d-46b9-8926-07b628f2ea3b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.371431] env[62585]: DEBUG nova.compute.manager [req-8a456bc8-27ae-4f63-8f57-237fbfc68e8c req-93ce1224-ffaa-4a13-90ef-55a89139215a service nova] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Received event network-vif-deleted-52853daf-8aeb-4e00-b8de-4d44b42e6529 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 815.389832] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Releasing lock "refresh_cache-e4bd743b-b3a6-4872-9e33-a0183b976292" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.390075] env[62585]: DEBUG nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 815.390256] env[62585]: DEBUG nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 815.390423] env[62585]: DEBUG nova.network.neutron [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 815.404961] env[62585]: DEBUG nova.network.neutron [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 815.434898] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded07aae-8de6-40a0-a9e0-b08bef44a5d8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.442280] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16854682-026a-4871-b7f5-e6a11d0c606d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.471882] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c04720d-74fd-4477-8e2c-36f706f86560 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.478907] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3b0d29-6f85-4e69-a24d-5786da80b0ac {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.491868] env[62585]: DEBUG nova.compute.provider_tree [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.727176] env[62585]: INFO nova.scheduler.client.report [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Deleted allocations for instance 4dfc00d9-64db-439e-baee-041562f7354b [ 815.908757] env[62585]: DEBUG nova.network.neutron [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.994567] env[62585]: DEBUG nova.scheduler.client.report [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed for provider 
66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 816.235186] env[62585]: DEBUG oslo_concurrency.lockutils [None req-446cdb4d-eda1-4856-902c-f76918f6e370 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "4dfc00d9-64db-439e-baee-041562f7354b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.105s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.411754] env[62585]: INFO nova.compute.manager [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: e4bd743b-b3a6-4872-9e33-a0183b976292] Took 1.02 seconds to deallocate network for instance. [ 816.499422] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.210s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.499940] env[62585]: DEBUG nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 816.502366] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.506s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.738168] env[62585]: DEBUG nova.compute.manager [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 817.006783] env[62585]: DEBUG nova.compute.utils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 817.008207] env[62585]: DEBUG nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 817.008450] env[62585]: DEBUG nova.network.neutron [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 817.061596] env[62585]: DEBUG nova.policy [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9a2336e0b124f03ad700405bcad8f32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19b8936eaf754cbcbd1b099846a3146d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 817.135683] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f51a86d-6bc2-449b-91ea-680ea78afca6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.143192] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7eebc0f-8a6d-4032-9208-0b8505301e14 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.173693] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c2484b-4164-4198-9097-ef5e5714ba04 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.181036] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4601799-892e-40c3-8947-4efb308ed55d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.193800] env[62585]: DEBUG nova.compute.provider_tree [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.256924] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.343988] env[62585]: DEBUG nova.network.neutron [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Successfully created port: 6c3726d1-8ffe-420d-9985-5ef0a82289c4 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 817.448480] env[62585]: INFO nova.scheduler.client.report [None 
req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Deleted allocations for instance e4bd743b-b3a6-4872-9e33-a0183b976292 [ 817.514191] env[62585]: DEBUG nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 817.597783] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "67e5af2f-4eec-41ec-916f-9f9b77596943" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.598043] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "67e5af2f-4eec-41ec-916f-9f9b77596943" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.620332] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "d644c700-c5d1-4549-b73b-0573f268dc40" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.621474] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "d644c700-c5d1-4549-b73b-0573f268dc40" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.696548] env[62585]: DEBUG nova.scheduler.client.report [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 817.959229] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd642f1e-13a2-4f67-afc1-3b9d6cf28e51 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "e4bd743b-b3a6-4872-9e33-a0183b976292" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.476s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.035865] env[62585]: DEBUG nova.compute.manager [req-27dda325-3580-4394-8895-16188ea85c9d req-935216cf-31a6-4c62-9150-2a308aeebeff service nova] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Received event network-changed-6c3726d1-8ffe-420d-9985-5ef0a82289c4 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 818.036076] env[62585]: DEBUG nova.compute.manager [req-27dda325-3580-4394-8895-16188ea85c9d req-935216cf-31a6-4c62-9150-2a308aeebeff service nova] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Refreshing instance network info cache due to event network-changed-6c3726d1-8ffe-420d-9985-5ef0a82289c4. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 818.036328] env[62585]: DEBUG oslo_concurrency.lockutils [req-27dda325-3580-4394-8895-16188ea85c9d req-935216cf-31a6-4c62-9150-2a308aeebeff service nova] Acquiring lock "refresh_cache-e4373e2a-cc21-41b7-be28-9b140ab43247" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.036482] env[62585]: DEBUG oslo_concurrency.lockutils [req-27dda325-3580-4394-8895-16188ea85c9d req-935216cf-31a6-4c62-9150-2a308aeebeff service nova] Acquired lock "refresh_cache-e4373e2a-cc21-41b7-be28-9b140ab43247" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.036644] env[62585]: DEBUG nova.network.neutron [req-27dda325-3580-4394-8895-16188ea85c9d req-935216cf-31a6-4c62-9150-2a308aeebeff service nova] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Refreshing network info cache for port 6c3726d1-8ffe-420d-9985-5ef0a82289c4 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 818.201380] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.699s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.202148] env[62585]: ERROR nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port faaa0631-b196-4de4-8644-db77250640ab, please check neutron logs for more information. 
[ 818.202148] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Traceback (most recent call last): [ 818.202148] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 818.202148] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] self.driver.spawn(context, instance, image_meta, [ 818.202148] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 818.202148] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 818.202148] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 818.202148] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] vm_ref = self.build_virtual_machine(instance, [ 818.202148] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 818.202148] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] vif_infos = vmwarevif.get_vif_info(self._session, [ 818.202148] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 818.202433] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] for vif in network_info: [ 818.202433] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 818.202433] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] return self._sync_wrapper(fn, *args, **kwargs) [ 818.202433] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 818.202433] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] self.wait() [ 818.202433] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 818.202433] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] self[:] = self._gt.wait() [ 818.202433] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 818.202433] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] return self._exit_event.wait() [ 818.202433] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 818.202433] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] current.throw(*self._exc) [ 818.202433] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
818.202433] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] result = function(*args, **kwargs) [ 818.202763] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 818.202763] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] return func(*args, **kwargs) [ 818.202763] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 818.202763] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] raise e [ 818.202763] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 818.202763] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] nwinfo = self.network_api.allocate_for_instance( [ 818.202763] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 818.202763] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] created_port_ids = self._update_ports_for_instance( [ 818.202763] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 818.202763] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] with excutils.save_and_reraise_exception(): [ 818.202763] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 818.202763] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] self.force_reraise() [ 818.202763] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 818.203082] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] raise self.value [ 818.203082] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 818.203082] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] updated_port = self._update_port( [ 818.203082] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 818.203082] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] _ensure_no_port_binding_failure(port) [ 818.203082] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 818.203082] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] raise exception.PortBindingFailed(port_id=port['id']) [ 818.203082] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] nova.exception.PortBindingFailed: Binding failed for 
port faaa0631-b196-4de4-8644-db77250640ab, please check neutron logs for more information. [ 818.203082] env[62585]: ERROR nova.compute.manager [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] [ 818.203082] env[62585]: DEBUG nova.compute.utils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Binding failed for port faaa0631-b196-4de4-8644-db77250640ab, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 818.204115] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.396s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.205533] env[62585]: INFO nova.compute.claims [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 818.208218] env[62585]: DEBUG nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Build of instance 54e0a14b-cc4f-4445-8d86-f25cc410d7d0 was re-scheduled: Binding failed for port faaa0631-b196-4de4-8644-db77250640ab, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 818.208671] env[62585]: DEBUG nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 818.208898] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Acquiring lock "refresh_cache-54e0a14b-cc4f-4445-8d86-f25cc410d7d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.209348] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Acquired lock "refresh_cache-54e0a14b-cc4f-4445-8d86-f25cc410d7d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.209348] env[62585]: DEBUG nova.network.neutron [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 818.217013] env[62585]: ERROR nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6c3726d1-8ffe-420d-9985-5ef0a82289c4, please check neutron logs for more information. 
[ 818.217013] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 818.217013] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 818.217013] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 818.217013] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 818.217013] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 818.217013] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 818.217013] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 818.217013] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 818.217013] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 818.217013] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 818.217013] env[62585]: ERROR nova.compute.manager raise self.value [ 818.217013] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 818.217013] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 818.217013] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 818.217013] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 818.217497] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 818.217497] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 818.217497] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6c3726d1-8ffe-420d-9985-5ef0a82289c4, please check neutron logs for more information. 
[ 818.217497] env[62585]: ERROR nova.compute.manager [ 818.217497] env[62585]: Traceback (most recent call last): [ 818.217497] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 818.217497] env[62585]: listener.cb(fileno) [ 818.217497] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 818.217497] env[62585]: result = function(*args, **kwargs) [ 818.217497] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 818.217497] env[62585]: return func(*args, **kwargs) [ 818.217497] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 818.217497] env[62585]: raise e [ 818.217497] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 818.217497] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 818.217497] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 818.217497] env[62585]: created_port_ids = self._update_ports_for_instance( [ 818.217497] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 818.217497] env[62585]: with excutils.save_and_reraise_exception(): [ 818.217497] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 818.217497] env[62585]: self.force_reraise() [ 818.217497] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 818.217497] env[62585]: raise self.value [ 818.217497] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 818.217497] env[62585]: updated_port = self._update_port( [ 818.217497] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 818.217497] env[62585]: _ensure_no_port_binding_failure(port) [ 818.217497] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 818.217497] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 818.218288] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 6c3726d1-8ffe-420d-9985-5ef0a82289c4, please check neutron logs for more information. [ 818.218288] env[62585]: Removing descriptor: 17 [ 818.461759] env[62585]: DEBUG nova.compute.manager [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 818.522801] env[62585]: DEBUG nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 818.552287] env[62585]: DEBUG nova.virt.hardware [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 818.552287] env[62585]: DEBUG nova.virt.hardware [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 818.552287] env[62585]: DEBUG nova.virt.hardware [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 818.552433] env[62585]: DEBUG nova.virt.hardware [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 818.552433] env[62585]: DEBUG nova.virt.hardware [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 818.552433] env[62585]: DEBUG nova.virt.hardware [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 818.553091] env[62585]: DEBUG nova.virt.hardware [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 818.554090] env[62585]: DEBUG nova.virt.hardware [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 818.554461] env[62585]: DEBUG 
nova.virt.hardware [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 818.554871] env[62585]: DEBUG nova.virt.hardware [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 818.555223] env[62585]: DEBUG nova.virt.hardware [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 818.558071] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993870b6-18d2-49be-b8b6-4b6be5521e21 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.562279] env[62585]: DEBUG nova.network.neutron [req-27dda325-3580-4394-8895-16188ea85c9d req-935216cf-31a6-4c62-9150-2a308aeebeff service nova] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 818.570751] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0f2a29-d60b-4553-aaa0-116a4077579c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.586774] env[62585]: ERROR nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6c3726d1-8ffe-420d-9985-5ef0a82289c4, please check neutron logs for more information. 
[ 818.586774] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Traceback (most recent call last): [ 818.586774] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 818.586774] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] yield resources [ 818.586774] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 818.586774] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] self.driver.spawn(context, instance, image_meta, [ 818.586774] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 818.586774] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] self._vmops.spawn(context, instance, image_meta, injected_files, [ 818.586774] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 818.586774] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] vm_ref = self.build_virtual_machine(instance, [ 818.586774] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 818.588350] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] vif_infos = vmwarevif.get_vif_info(self._session, [ 818.588350] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 818.588350] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] for vif in network_info: [ 818.588350] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 818.588350] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] return self._sync_wrapper(fn, *args, **kwargs) [ 818.588350] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 818.588350] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] self.wait() [ 818.588350] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 818.588350] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] self[:] = self._gt.wait() [ 818.588350] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 818.588350] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] return self._exit_event.wait() [ 818.588350] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 818.588350] env[62585]: ERROR 
nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] current.throw(*self._exc) [ 818.588989] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 818.588989] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] result = function(*args, **kwargs) [ 818.588989] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 818.588989] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] return func(*args, **kwargs) [ 818.588989] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 818.588989] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] raise e [ 818.588989] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 818.588989] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] nwinfo = self.network_api.allocate_for_instance( [ 818.588989] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 818.588989] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] created_port_ids = self._update_ports_for_instance( [ 818.588989] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 818.588989] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] with excutils.save_and_reraise_exception(): [ 818.588989] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 818.589593] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] self.force_reraise() [ 818.589593] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 818.589593] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] raise self.value [ 818.589593] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 818.589593] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] updated_port = self._update_port( [ 818.589593] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 818.589593] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] _ensure_no_port_binding_failure(port) [ 818.589593] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
818.589593] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] raise exception.PortBindingFailed(port_id=port['id']) [ 818.589593] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] nova.exception.PortBindingFailed: Binding failed for port 6c3726d1-8ffe-420d-9985-5ef0a82289c4, please check neutron logs for more information. [ 818.589593] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] [ 818.589593] env[62585]: INFO nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Terminating instance [ 818.590067] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "refresh_cache-e4373e2a-cc21-41b7-be28-9b140ab43247" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.661049] env[62585]: DEBUG nova.network.neutron [req-27dda325-3580-4394-8895-16188ea85c9d req-935216cf-31a6-4c62-9150-2a308aeebeff service nova] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.728117] env[62585]: DEBUG nova.network.neutron [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 818.819728] env[62585]: DEBUG nova.network.neutron [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.982570] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.163566] env[62585]: DEBUG oslo_concurrency.lockutils [req-27dda325-3580-4394-8895-16188ea85c9d req-935216cf-31a6-4c62-9150-2a308aeebeff service nova] Releasing lock "refresh_cache-e4373e2a-cc21-41b7-be28-9b140ab43247" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.164086] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock "refresh_cache-e4373e2a-cc21-41b7-be28-9b140ab43247" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.164343] env[62585]: DEBUG nova.network.neutron [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.322246] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Releasing lock "refresh_cache-54e0a14b-cc4f-4445-8d86-f25cc410d7d0" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.322487] env[62585]: DEBUG nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 819.322669] env[62585]: DEBUG nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 819.322832] env[62585]: DEBUG nova.network.neutron [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 819.351950] env[62585]: DEBUG nova.network.neutron [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.370703] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8261d6-3aa9-47c0-8e15-805f13dd74a7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.378481] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0d274e-9b34-4c96-82ce-b9e528d1695c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.411250] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138e5297-7b96-4709-82c4-12862cfe008e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.420247] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0666c25a-89fd-4175-9040-049c30612f8e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.432221] env[62585]: DEBUG nova.compute.provider_tree [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.684422] env[62585]: DEBUG nova.network.neutron [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.796516] env[62585]: DEBUG nova.network.neutron [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.855543] env[62585]: DEBUG nova.network.neutron [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.936055] env[62585]: DEBUG nova.scheduler.client.report [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 820.108097] env[62585]: DEBUG nova.compute.manager [req-21596726-d0c9-4aba-884b-5cca3c99181b req-68ee3f97-9aad-47ed-a98c-bfdd8d04388d service nova] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Received event network-vif-deleted-6c3726d1-8ffe-420d-9985-5ef0a82289c4 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 820.299554] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "refresh_cache-e4373e2a-cc21-41b7-be28-9b140ab43247" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.300017] env[62585]: DEBUG nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 820.300222] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 820.300540] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5102972a-e70e-4aef-889a-ef69f12c9052 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.309933] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3fd796-1766-4713-bcb0-e10e4629c334 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.330855] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e4373e2a-cc21-41b7-be28-9b140ab43247 could not be found. [ 820.331033] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 820.331225] env[62585]: INFO nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Took 0.03 seconds to destroy the instance on the hypervisor. [ 820.331462] env[62585]: DEBUG oslo.service.loopingcall [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 820.331687] env[62585]: DEBUG nova.compute.manager [-] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 820.331779] env[62585]: DEBUG nova.network.neutron [-] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 820.347315] env[62585]: DEBUG nova.network.neutron [-] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.358499] env[62585]: INFO nova.compute.manager [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] [instance: 54e0a14b-cc4f-4445-8d86-f25cc410d7d0] Took 1.04 seconds to deallocate network for instance. 
[ 820.440221] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.236s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.440736] env[62585]: DEBUG nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 820.443187] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.040s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.444549] env[62585]: INFO nova.compute.claims [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 820.825452] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquiring lock "d2c6418c-b070-4c46-824b-18638e9b569f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.825452] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "d2c6418c-b070-4c46-824b-18638e9b569f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.850329] env[62585]: DEBUG nova.network.neutron [-] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.948512] env[62585]: DEBUG nova.compute.utils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 820.952120] env[62585]: DEBUG nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 820.952298] env[62585]: DEBUG nova.network.neutron [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 820.992311] env[62585]: DEBUG nova.policy [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac7d82c678d64fba8373930238d5bb2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8121e0a00494834a580b940d36e0160', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 821.252679] env[62585]: DEBUG nova.network.neutron [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Successfully created port: 9a870b5d-e28e-4aec-a108-6a7e9978d6d5 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 821.356167] env[62585]: INFO nova.compute.manager [-] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Took 1.02 seconds to deallocate network for instance. [ 821.359196] env[62585]: DEBUG nova.compute.claims [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 821.359196] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.387537] env[62585]: INFO nova.scheduler.client.report [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Deleted allocations for instance 54e0a14b-cc4f-4445-8d86-f25cc410d7d0 [ 821.458797] env[62585]: DEBUG nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 821.627069] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5f41dd-bb41-475d-9aa9-dcd9f75f3f17 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.634623] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af22382f-a54e-4e34-885e-dd01b4908980 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.665985] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5bb7bcd-0d74-432f-8369-3303261641d5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.673249] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed137b49-4539-4584-a33a-95efd04f0acf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.686506] env[62585]: DEBUG nova.compute.provider_tree [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.894993] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ef231192-f7dd-4f96-8492-64765d875bd5 tempest-VolumesAdminNegativeTest-1038077645 tempest-VolumesAdminNegativeTest-1038077645-project-member] Lock "54e0a14b-cc4f-4445-8d86-f25cc410d7d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.572s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.135403] env[62585]: DEBUG nova.compute.manager [req-483fe20d-0561-41d7-9cb9-7e00f62c4acd req-d64e0cf0-7f45-4d34-b961-49610b16a51d service nova] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Received event network-changed-9a870b5d-e28e-4aec-a108-6a7e9978d6d5 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 822.135462] env[62585]: DEBUG nova.compute.manager [req-483fe20d-0561-41d7-9cb9-7e00f62c4acd req-d64e0cf0-7f45-4d34-b961-49610b16a51d service nova] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Refreshing instance network info cache due to event network-changed-9a870b5d-e28e-4aec-a108-6a7e9978d6d5. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 822.136795] env[62585]: DEBUG oslo_concurrency.lockutils [req-483fe20d-0561-41d7-9cb9-7e00f62c4acd req-d64e0cf0-7f45-4d34-b961-49610b16a51d service nova] Acquiring lock "refresh_cache-d207fb66-ad23-47a5-a304-ecf885de4ced" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.136795] env[62585]: DEBUG oslo_concurrency.lockutils [req-483fe20d-0561-41d7-9cb9-7e00f62c4acd req-d64e0cf0-7f45-4d34-b961-49610b16a51d service nova] Acquired lock "refresh_cache-d207fb66-ad23-47a5-a304-ecf885de4ced" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.136795] env[62585]: DEBUG nova.network.neutron [req-483fe20d-0561-41d7-9cb9-7e00f62c4acd req-d64e0cf0-7f45-4d34-b961-49610b16a51d service nova] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Refreshing network info cache for port 9a870b5d-e28e-4aec-a108-6a7e9978d6d5 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 822.177091] env[62585]: ERROR nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9a870b5d-e28e-4aec-a108-6a7e9978d6d5, please check neutron logs for more information. [ 822.177091] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 822.177091] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 822.177091] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 822.177091] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 822.177091] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 822.177091] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 822.177091] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 822.177091] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 822.177091] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 822.177091] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 822.177091] env[62585]: ERROR nova.compute.manager raise self.value [ 822.177091] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 822.177091] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 822.177091] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 822.177091] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 822.177902] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 822.177902] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 822.177902] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed 
for port 9a870b5d-e28e-4aec-a108-6a7e9978d6d5, please check neutron logs for more information. [ 822.177902] env[62585]: ERROR nova.compute.manager [ 822.177902] env[62585]: Traceback (most recent call last): [ 822.177902] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 822.177902] env[62585]: listener.cb(fileno) [ 822.177902] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 822.177902] env[62585]: result = function(*args, **kwargs) [ 822.177902] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 822.177902] env[62585]: return func(*args, **kwargs) [ 822.177902] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 822.177902] env[62585]: raise e [ 822.177902] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 822.177902] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 822.177902] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 822.177902] env[62585]: created_port_ids = self._update_ports_for_instance( [ 822.177902] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 822.177902] env[62585]: with excutils.save_and_reraise_exception(): [ 822.177902] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 822.177902] env[62585]: self.force_reraise() [ 822.177902] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 822.177902] env[62585]: raise self.value [ 822.177902] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 822.177902] env[62585]: updated_port = self._update_port( [ 822.177902] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 822.177902] env[62585]: _ensure_no_port_binding_failure(port) [ 822.177902] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 822.177902] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 822.179855] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 9a870b5d-e28e-4aec-a108-6a7e9978d6d5, please check neutron logs for more information. [ 822.179855] env[62585]: Removing descriptor: 17 [ 822.191579] env[62585]: DEBUG nova.scheduler.client.report [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 822.397503] env[62585]: DEBUG nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 822.468731] env[62585]: DEBUG nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 822.498891] env[62585]: DEBUG nova.virt.hardware [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 822.499183] env[62585]: DEBUG nova.virt.hardware [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 822.499357] env[62585]: DEBUG nova.virt.hardware [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 822.499547] env[62585]: DEBUG nova.virt.hardware [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 822.499694] env[62585]: DEBUG nova.virt.hardware [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 822.499928] env[62585]: DEBUG nova.virt.hardware [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 822.500075] env[62585]: DEBUG nova.virt.hardware [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 822.500249] env[62585]: DEBUG nova.virt.hardware [None req-ed7bb052-7630-42de-85aa-348d850c2b4d 
tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 822.500420] env[62585]: DEBUG nova.virt.hardware [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 822.500583] env[62585]: DEBUG nova.virt.hardware [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 822.500753] env[62585]: DEBUG nova.virt.hardware [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 822.501663] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1166cd-4b35-472e-88ac-a6524613534d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.509765] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2976eb-b5fc-4ffc-8fb8-6e0a4fde3d06 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.524120] env[62585]: ERROR nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9a870b5d-e28e-4aec-a108-6a7e9978d6d5, please check neutron logs for more information. 
[ 822.524120] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Traceback (most recent call last): [ 822.524120] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 822.524120] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] yield resources [ 822.524120] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 822.524120] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] self.driver.spawn(context, instance, image_meta, [ 822.524120] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 822.524120] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] self._vmops.spawn(context, instance, image_meta, injected_files, [ 822.524120] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 822.524120] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] vm_ref = self.build_virtual_machine(instance, [ 822.524120] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 822.524436] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] vif_infos = vmwarevif.get_vif_info(self._session, [ 822.524436] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 822.524436] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] for vif in network_info: [ 822.524436] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 822.524436] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] return self._sync_wrapper(fn, *args, **kwargs) [ 822.524436] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 822.524436] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] self.wait() [ 822.524436] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 822.524436] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] self[:] = self._gt.wait() [ 822.524436] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 822.524436] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] return self._exit_event.wait() [ 822.524436] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 822.524436] env[62585]: ERROR 
nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] current.throw(*self._exc) [ 822.524776] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 822.524776] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] result = function(*args, **kwargs) [ 822.524776] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 822.524776] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] return func(*args, **kwargs) [ 822.524776] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 822.524776] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] raise e [ 822.524776] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 822.524776] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] nwinfo = self.network_api.allocate_for_instance( [ 822.524776] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 822.524776] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] created_port_ids = self._update_ports_for_instance( [ 822.524776] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 822.524776] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] with excutils.save_and_reraise_exception(): [ 822.524776] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 822.525197] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] self.force_reraise() [ 822.525197] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 822.525197] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] raise self.value [ 822.525197] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 822.525197] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] updated_port = self._update_port( [ 822.525197] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 822.525197] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] _ensure_no_port_binding_failure(port) [ 822.525197] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
822.525197] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] raise exception.PortBindingFailed(port_id=port['id']) [ 822.525197] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] nova.exception.PortBindingFailed: Binding failed for port 9a870b5d-e28e-4aec-a108-6a7e9978d6d5, please check neutron logs for more information. [ 822.525197] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] [ 822.525197] env[62585]: INFO nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Terminating instance [ 822.526612] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "refresh_cache-d207fb66-ad23-47a5-a304-ecf885de4ced" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.660010] env[62585]: DEBUG nova.network.neutron [req-483fe20d-0561-41d7-9cb9-7e00f62c4acd req-d64e0cf0-7f45-4d34-b961-49610b16a51d service nova] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.697142] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.254s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.697586] env[62585]: DEBUG nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 822.706893] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.576s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.708490] env[62585]: INFO nova.compute.claims [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 822.766016] env[62585]: DEBUG nova.network.neutron [req-483fe20d-0561-41d7-9cb9-7e00f62c4acd req-d64e0cf0-7f45-4d34-b961-49610b16a51d service nova] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.888117] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 822.888117] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 822.888117] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Starting heal instance info cache {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 822.888117] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Rebuilding the list of instances to heal {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 822.920161] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.218736] env[62585]: DEBUG nova.compute.utils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 823.220375] env[62585]: DEBUG nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 823.220822] env[62585]: DEBUG nova.network.neutron [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 823.264624] env[62585]: DEBUG nova.policy [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28834cc42f8a49cebca5647badabf8ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c49ab537d42244f495aaa3cbdaafc6b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 823.268658] env[62585]: DEBUG oslo_concurrency.lockutils [req-483fe20d-0561-41d7-9cb9-7e00f62c4acd req-d64e0cf0-7f45-4d34-b961-49610b16a51d service nova] Releasing lock "refresh_cache-d207fb66-ad23-47a5-a304-ecf885de4ced" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.269772] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "refresh_cache-d207fb66-ad23-47a5-a304-ecf885de4ced" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.269772] env[62585]: DEBUG nova.network.neutron [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 823.393019] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 823.393019] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 823.393019] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 823.393019] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Skipping network cache update for instance because it is Building. 
{{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 823.393019] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 823.393019] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 823.393295] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Didn't find any instances for network info cache update. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 823.393295] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.393295] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.393295] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.393295] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.393295] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.393477] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.393477] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62585) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 823.393706] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.534018] env[62585]: DEBUG nova.network.neutron [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Successfully created port: 25f099b0-6edc-4046-90dd-9447559a5a03 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 823.726634] env[62585]: DEBUG nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 823.754026] env[62585]: DEBUG nova.scheduler.client.report [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Refreshing inventories for resource provider 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 823.771614] env[62585]: DEBUG nova.scheduler.client.report [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Updating ProviderTree inventory for provider 66db9ec1-b5c3-45d2-a885-8e338110656b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 823.771842] env[62585]: DEBUG nova.compute.provider_tree [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Updating inventory in ProviderTree for provider 66db9ec1-b5c3-45d2-a885-8e338110656b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 823.788728] env[62585]: DEBUG nova.scheduler.client.report [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Refreshing aggregate associations for resource provider 66db9ec1-b5c3-45d2-a885-8e338110656b, aggregates: None {{(pid=62585) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 823.796140] env[62585]: DEBUG nova.network.neutron [None req-ed7bb052-7630-42de-85aa-348d850c2b4d 
tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.814442] env[62585]: DEBUG nova.scheduler.client.report [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Refreshing trait associations for resource provider 66db9ec1-b5c3-45d2-a885-8e338110656b, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62585) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 823.897103] env[62585]: DEBUG nova.network.neutron [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.897720] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.992623] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f599397-8387-4790-b820-7cc5ad2ada20 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.000124] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f56fd7-6b16-49f8-987f-8c8ba8f6c4f1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.028728] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765e59f9-70c3-41f1-ae64-a0f1b79e7c0d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.036022] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdca222e-9a60-4ecc-be2c-866b13ed75f7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.053033] env[62585]: DEBUG nova.compute.provider_tree [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.157876] env[62585]: DEBUG nova.compute.manager [req-ce8d8ac3-1f96-4780-bf25-91ed4916deb4 req-85327dfa-d14a-4d19-b96a-22f9b61c3eba service nova] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Received event network-vif-deleted-9a870b5d-e28e-4aec-a108-6a7e9978d6d5 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 824.401098] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock 
"refresh_cache-d207fb66-ad23-47a5-a304-ecf885de4ced" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.401098] env[62585]: DEBUG nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 824.401098] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 824.401098] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a486e27b-6f3a-4708-80b6-1de8df2f9dbe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.409577] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e8657b-4db7-4b4e-bacc-0b7e598357ec {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.431936] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d207fb66-ad23-47a5-a304-ecf885de4ced could not be found. [ 824.432174] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 824.432354] env[62585]: INFO nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Took 0.03 seconds to destroy the instance on the hypervisor. [ 824.432597] env[62585]: DEBUG oslo.service.loopingcall [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 824.432818] env[62585]: DEBUG nova.compute.manager [-] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 824.432911] env[62585]: DEBUG nova.network.neutron [-] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 824.447303] env[62585]: DEBUG nova.network.neutron [-] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.554195] env[62585]: DEBUG nova.scheduler.client.report [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 824.735266] env[62585]: DEBUG nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 824.760613] env[62585]: DEBUG nova.virt.hardware [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=<?>,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T09:57:27Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 824.760922] env[62585]: DEBUG nova.virt.hardware [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 824.761037] env[62585]: DEBUG nova.virt.hardware [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 824.761226] env[62585]: DEBUG nova.virt.hardware [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 824.761370] env[62585]: DEBUG nova.virt.hardware [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 824.761515] env[62585]: DEBUG 
nova.virt.hardware [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 824.761723] env[62585]: DEBUG nova.virt.hardware [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 824.761879] env[62585]: DEBUG nova.virt.hardware [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 824.762065] env[62585]: DEBUG nova.virt.hardware [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 824.762233] env[62585]: DEBUG nova.virt.hardware [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 824.762403] env[62585]: DEBUG nova.virt.hardware [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 824.763252] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0d90e3-0fe6-4846-9da2-d70cafb9273f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.771191] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac619948-dbc3-4012-b200-61296dcac2bb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.949427] env[62585]: DEBUG nova.network.neutron [-] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.059393] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.352s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.059924] env[62585]: DEBUG nova.compute.manager [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 
8763a058-b453-4f03-9532-7d7e65efdfb2] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 825.062844] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.907s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.064407] env[62585]: INFO nova.compute.claims [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 825.262170] env[62585]: ERROR nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 25f099b0-6edc-4046-90dd-9447559a5a03, please check neutron logs for more information. [ 825.262170] env[62585]: ERROR nova.compute.manager Traceback (most recent call last): [ 825.262170] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 825.262170] env[62585]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 825.262170] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 825.262170] env[62585]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 825.262170] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 825.262170] env[62585]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 825.262170] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 825.262170] env[62585]: ERROR nova.compute.manager self.force_reraise() [ 825.262170] env[62585]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 825.262170] env[62585]: ERROR nova.compute.manager raise self.value [ 825.262170] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 825.262170] env[62585]: ERROR nova.compute.manager updated_port = self._update_port( [ 825.262170] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 825.262170] env[62585]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 825.262823] env[62585]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 825.262823] env[62585]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 825.262823] env[62585]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 25f099b0-6edc-4046-90dd-9447559a5a03, please check neutron logs for more information. 
[ 825.262823] env[62585]: ERROR nova.compute.manager [ 825.262823] env[62585]: Traceback (most recent call last): [ 825.262823] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 825.262823] env[62585]: listener.cb(fileno) [ 825.262823] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 825.262823] env[62585]: result = function(*args, **kwargs) [ 825.262823] env[62585]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 825.262823] env[62585]: return func(*args, **kwargs) [ 825.262823] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 825.262823] env[62585]: raise e [ 825.262823] env[62585]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 825.262823] env[62585]: nwinfo = self.network_api.allocate_for_instance( [ 825.262823] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 825.262823] env[62585]: created_port_ids = self._update_ports_for_instance( [ 825.262823] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 825.262823] env[62585]: with excutils.save_and_reraise_exception(): [ 825.262823] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 825.262823] env[62585]: self.force_reraise() [ 825.262823] env[62585]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 825.262823] env[62585]: raise self.value [ 825.262823] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 825.262823] env[62585]: updated_port = self._update_port( [ 825.262823] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 825.262823] env[62585]: _ensure_no_port_binding_failure(port) [ 825.262823] env[62585]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 825.262823] env[62585]: raise exception.PortBindingFailed(port_id=port['id']) [ 825.263993] env[62585]: nova.exception.PortBindingFailed: Binding failed for port 25f099b0-6edc-4046-90dd-9447559a5a03, please check neutron logs for more information. [ 825.263993] env[62585]: Removing descriptor: 17 [ 825.263993] env[62585]: ERROR nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 25f099b0-6edc-4046-90dd-9447559a5a03, please check neutron logs for more information. 
[ 825.263993] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Traceback (most recent call last): [ 825.263993] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 825.263993] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] yield resources [ 825.263993] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 825.263993] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] self.driver.spawn(context, instance, image_meta, [ 825.263993] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 825.263993] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 825.263993] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 825.263993] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] vm_ref = self.build_virtual_machine(instance, [ 825.264471] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 825.264471] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] vif_infos = vmwarevif.get_vif_info(self._session, [ 825.264471] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 825.264471] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] for vif in network_info: [ 825.264471] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 825.264471] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] return self._sync_wrapper(fn, *args, **kwargs) [ 825.264471] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 825.264471] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] self.wait() [ 825.264471] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 825.264471] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] self[:] = self._gt.wait() [ 825.264471] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 825.264471] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] return self._exit_event.wait() [ 825.264471] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 825.264980] env[62585]: ERROR 
nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] result = hub.switch() [ 825.264980] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 825.264980] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] return self.greenlet.switch() [ 825.264980] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 825.264980] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] result = function(*args, **kwargs) [ 825.264980] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 825.264980] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] return func(*args, **kwargs) [ 825.264980] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 825.264980] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] raise e [ 825.264980] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 825.264980] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] nwinfo = self.network_api.allocate_for_instance( [ 825.264980] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 825.264980] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] created_port_ids = self._update_ports_for_instance( [ 825.265476] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 825.265476] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] with excutils.save_and_reraise_exception(): [ 825.265476] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 825.265476] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] self.force_reraise() [ 825.265476] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 825.265476] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] raise self.value [ 825.265476] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 825.265476] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] updated_port = self._update_port( [ 825.265476] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 825.265476] 
env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] _ensure_no_port_binding_failure(port) [ 825.265476] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 825.265476] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] raise exception.PortBindingFailed(port_id=port['id']) [ 825.265810] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] nova.exception.PortBindingFailed: Binding failed for port 25f099b0-6edc-4046-90dd-9447559a5a03, please check neutron logs for more information. [ 825.265810] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] [ 825.265810] env[62585]: INFO nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Terminating instance [ 825.268871] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "refresh_cache-e89b55d3-aa15-4d28-ba80-fe3b45ee289f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.269038] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "refresh_cache-e89b55d3-aa15-4d28-ba80-fe3b45ee289f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.269210] env[62585]: DEBUG nova.network.neutron [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 825.451813] env[62585]: INFO nova.compute.manager [-] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Took 1.02 seconds to deallocate network for instance. 
[ 825.456085] env[62585]: DEBUG nova.compute.claims [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 825.459018] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.569212] env[62585]: DEBUG nova.compute.utils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 825.576560] env[62585]: DEBUG nova.compute.manager [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 825.577850] env[62585]: DEBUG nova.network.neutron [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 825.641458] env[62585]: DEBUG nova.policy [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4428eefffda84fd18792c2bd26c4c861', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '36c76fd292d84bbe97c7221e75831fbb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 825.788163] env[62585]: DEBUG nova.network.neutron [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 825.869233] env[62585]: DEBUG nova.network.neutron [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.984253] env[62585]: DEBUG nova.network.neutron [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Successfully created port: 597e2cc3-d043-4c6b-a254-2d9838a1ebf9 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 826.077824] env[62585]: DEBUG nova.compute.manager [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 826.190125] env[62585]: DEBUG nova.compute.manager [req-f00297c5-1127-4148-a435-0f48c754420f req-883a9e6d-838e-4c53-8155-d2343cbb4823 service nova] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Received event network-changed-25f099b0-6edc-4046-90dd-9447559a5a03 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 826.190125] env[62585]: DEBUG nova.compute.manager [req-f00297c5-1127-4148-a435-0f48c754420f req-883a9e6d-838e-4c53-8155-d2343cbb4823 service nova] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Refreshing instance network info cache due to event network-changed-25f099b0-6edc-4046-90dd-9447559a5a03. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 826.190978] env[62585]: DEBUG oslo_concurrency.lockutils [req-f00297c5-1127-4148-a435-0f48c754420f req-883a9e6d-838e-4c53-8155-d2343cbb4823 service nova] Acquiring lock "refresh_cache-e89b55d3-aa15-4d28-ba80-fe3b45ee289f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.296422] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa973b3-4165-452d-8962-496f17f65bc6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.302253] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5da47c-c45b-42ee-9331-116b2cd5cf10 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.333720] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2035f648-057b-4c7e-8152-b54b4d0f7338 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.340828] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42e317b-597c-4696-9b97-0ca0f1b47a9b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.353241] env[62585]: DEBUG nova.compute.provider_tree [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.370362] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "refresh_cache-e89b55d3-aa15-4d28-ba80-fe3b45ee289f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.370803] env[62585]: DEBUG nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 826.371010] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 826.371329] env[62585]: DEBUG oslo_concurrency.lockutils [req-f00297c5-1127-4148-a435-0f48c754420f req-883a9e6d-838e-4c53-8155-d2343cbb4823 service nova] Acquired lock "refresh_cache-e89b55d3-aa15-4d28-ba80-fe3b45ee289f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.371535] env[62585]: DEBUG nova.network.neutron [req-f00297c5-1127-4148-a435-0f48c754420f req-883a9e6d-838e-4c53-8155-d2343cbb4823 service nova] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Refreshing network info cache for port 25f099b0-6edc-4046-90dd-9447559a5a03 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 826.372538] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c4135f40-62ec-4653-b812-7cf2b9f22e08 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.383014] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4fee90e-dd1a-44a3-9033-d1cd72905145 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.404356] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e89b55d3-aa15-4d28-ba80-fe3b45ee289f could not be found. [ 826.404569] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 826.404772] env[62585]: INFO nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 826.405025] env[62585]: DEBUG oslo.service.loopingcall [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 826.405250] env[62585]: DEBUG nova.compute.manager [-] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 826.405342] env[62585]: DEBUG nova.network.neutron [-] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 826.420381] env[62585]: DEBUG nova.network.neutron [-] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 826.856036] env[62585]: DEBUG nova.scheduler.client.report [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 826.894874] env[62585]: DEBUG nova.network.neutron [req-f00297c5-1127-4148-a435-0f48c754420f req-883a9e6d-838e-4c53-8155-d2343cbb4823 service nova] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 826.922439] env[62585]: DEBUG nova.network.neutron [-] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.011047] env[62585]: DEBUG nova.network.neutron [req-f00297c5-1127-4148-a435-0f48c754420f req-883a9e6d-838e-4c53-8155-d2343cbb4823 service nova] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.090468] env[62585]: DEBUG nova.compute.manager [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 827.115470] env[62585]: DEBUG nova.virt.hardware [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=<?>,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T09:57:27Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 827.115712] env[62585]: DEBUG nova.virt.hardware [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 827.115865] env[62585]: DEBUG nova.virt.hardware [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 827.116049] env[62585]: DEBUG nova.virt.hardware [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 827.116197] env[62585]: DEBUG nova.virt.hardware [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 827.116344] env[62585]: DEBUG nova.virt.hardware [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 827.116545] env[62585]: DEBUG nova.virt.hardware [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 827.116700] env[62585]: DEBUG nova.virt.hardware [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 827.116860] env[62585]: DEBUG nova.virt.hardware [None 
req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 827.117024] env[62585]: DEBUG nova.virt.hardware [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 827.117197] env[62585]: DEBUG nova.virt.hardware [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 827.118048] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac872c4a-2b8f-4fd2-8448-719aaf854542 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.127048] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94608fc4-009d-4334-ad55-d0e04ab9c7a1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.361475] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.299s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.362102] env[62585]: DEBUG nova.compute.manager [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 827.364847] env[62585]: DEBUG oslo_concurrency.lockutils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.787s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.425247] env[62585]: INFO nova.compute.manager [-] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Took 1.02 seconds to deallocate network for instance. 
[ 827.427675] env[62585]: DEBUG nova.compute.claims [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Aborting claim: {{(pid=62585) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 827.427855] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.513352] env[62585]: DEBUG oslo_concurrency.lockutils [req-f00297c5-1127-4148-a435-0f48c754420f req-883a9e6d-838e-4c53-8155-d2343cbb4823 service nova] Releasing lock "refresh_cache-e89b55d3-aa15-4d28-ba80-fe3b45ee289f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.513562] env[62585]: DEBUG nova.compute.manager [req-f00297c5-1127-4148-a435-0f48c754420f req-883a9e6d-838e-4c53-8155-d2343cbb4823 service nova] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Received event network-vif-deleted-25f099b0-6edc-4046-90dd-9447559a5a03 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 827.880657] env[62585]: DEBUG nova.compute.utils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 827.884700] env[62585]: DEBUG nova.compute.manager [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 827.884931] env[62585]: DEBUG nova.network.neutron [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 827.932100] env[62585]: DEBUG nova.policy [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '846fa76ac8244bc4a9a0d444c4af0d3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9645866ca8f0433cae30cf5867244ca8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 828.057568] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e283bbef-29a2-4268-89f0-defe16087158 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.065810] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21c0a1f-5a43-4c48-9b02-5fdc09e9157e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.096410] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f78f17-edf8-4440-b8ff-3bb94c7bfb69 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.104145] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f4ea86-bb00-4742-a018-df61edf3351c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.117472] env[62585]: DEBUG nova.compute.provider_tree [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.237610] env[62585]: DEBUG nova.network.neutron [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Successfully created port: bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 828.321792] env[62585]: DEBUG nova.compute.manager [req-ee57fceb-c5e5-4fe9-8551-4ea08b5b9549 req-48568b61-9a95-4b46-9694-17abe9dce53b service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Received event network-vif-plugged-597e2cc3-d043-4c6b-a254-2d9838a1ebf9 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 828.322087] env[62585]: DEBUG oslo_concurrency.lockutils [req-ee57fceb-c5e5-4fe9-8551-4ea08b5b9549 
req-48568b61-9a95-4b46-9694-17abe9dce53b service nova] Acquiring lock "8763a058-b453-4f03-9532-7d7e65efdfb2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.322305] env[62585]: DEBUG oslo_concurrency.lockutils [req-ee57fceb-c5e5-4fe9-8551-4ea08b5b9549 req-48568b61-9a95-4b46-9694-17abe9dce53b service nova] Lock "8763a058-b453-4f03-9532-7d7e65efdfb2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.322472] env[62585]: DEBUG oslo_concurrency.lockutils [req-ee57fceb-c5e5-4fe9-8551-4ea08b5b9549 req-48568b61-9a95-4b46-9694-17abe9dce53b service nova] Lock "8763a058-b453-4f03-9532-7d7e65efdfb2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.322671] env[62585]: DEBUG nova.compute.manager [req-ee57fceb-c5e5-4fe9-8551-4ea08b5b9549 req-48568b61-9a95-4b46-9694-17abe9dce53b service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] No waiting events found dispatching network-vif-plugged-597e2cc3-d043-4c6b-a254-2d9838a1ebf9 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 828.322791] env[62585]: WARNING nova.compute.manager [req-ee57fceb-c5e5-4fe9-8551-4ea08b5b9549 req-48568b61-9a95-4b46-9694-17abe9dce53b service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Received unexpected event network-vif-plugged-597e2cc3-d043-4c6b-a254-2d9838a1ebf9 for instance with vm_state building and task_state spawning. [ 828.391606] env[62585]: DEBUG nova.compute.manager [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 828.513206] env[62585]: DEBUG nova.network.neutron [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Successfully updated port: 597e2cc3-d043-4c6b-a254-2d9838a1ebf9 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 828.620756] env[62585]: DEBUG nova.scheduler.client.report [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 829.015424] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "refresh_cache-8763a058-b453-4f03-9532-7d7e65efdfb2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.015687] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquired lock "refresh_cache-8763a058-b453-4f03-9532-7d7e65efdfb2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.015687] env[62585]: DEBUG nova.network.neutron [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 829.129198] env[62585]: DEBUG oslo_concurrency.lockutils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.764s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.129921] env[62585]: ERROR nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 66d937d1-8514-49f1-9152-3881137092dd, please check neutron logs for more information. 
[ 829.129921] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Traceback (most recent call last): [ 829.129921] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 829.129921] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] self.driver.spawn(context, instance, image_meta, [ 829.129921] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 829.129921] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 829.129921] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 829.129921] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] vm_ref = self.build_virtual_machine(instance, [ 829.129921] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 829.129921] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] vif_infos = vmwarevif.get_vif_info(self._session, [ 829.129921] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 829.130251] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] for vif in network_info: [ 829.130251] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 829.130251] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] return self._sync_wrapper(fn, *args, **kwargs) [ 829.130251] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 829.130251] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] self.wait() [ 829.130251] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 829.130251] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] self[:] = self._gt.wait() [ 829.130251] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 829.130251] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] return self._exit_event.wait() [ 829.130251] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 829.130251] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] current.throw(*self._exc) [ 829.130251] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
829.130251] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] result = function(*args, **kwargs) [ 829.130556] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 829.130556] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] return func(*args, **kwargs) [ 829.130556] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 829.130556] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] raise e [ 829.130556] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 829.130556] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] nwinfo = self.network_api.allocate_for_instance( [ 829.130556] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 829.130556] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] created_port_ids = self._update_ports_for_instance( [ 829.130556] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 829.130556] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] with excutils.save_and_reraise_exception(): [ 829.130556] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 829.130556] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] self.force_reraise() [ 829.130556] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 829.130869] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] raise self.value [ 829.130869] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 829.130869] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] updated_port = self._update_port( [ 829.130869] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 829.130869] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] _ensure_no_port_binding_failure(port) [ 829.130869] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 829.130869] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] raise exception.PortBindingFailed(port_id=port['id']) [ 829.130869] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] nova.exception.PortBindingFailed: Binding failed for 
port 66d937d1-8514-49f1-9152-3881137092dd, please check neutron logs for more information. [ 829.130869] env[62585]: ERROR nova.compute.manager [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] [ 829.130869] env[62585]: DEBUG nova.compute.utils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Binding failed for port 66d937d1-8514-49f1-9152-3881137092dd, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 829.131814] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.053s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.134551] env[62585]: DEBUG nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Build of instance aed35d7d-f826-4601-aa4e-1d1dccd51d3a was re-scheduled: Binding failed for port 66d937d1-8514-49f1-9152-3881137092dd, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 829.135391] env[62585]: DEBUG nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 829.135391] env[62585]: DEBUG oslo_concurrency.lockutils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "refresh_cache-aed35d7d-f826-4601-aa4e-1d1dccd51d3a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.135511] env[62585]: DEBUG oslo_concurrency.lockutils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "refresh_cache-aed35d7d-f826-4601-aa4e-1d1dccd51d3a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.137305] env[62585]: DEBUG nova.network.neutron [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 829.402709] env[62585]: DEBUG nova.compute.manager [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 829.431667] env[62585]: DEBUG nova.virt.hardware [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 829.433885] env[62585]: DEBUG nova.virt.hardware [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 829.434061] env[62585]: DEBUG nova.virt.hardware [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 829.434256] env[62585]: DEBUG nova.virt.hardware [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 829.434402] env[62585]: DEBUG nova.virt.hardware [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 829.434688] env[62585]: DEBUG nova.virt.hardware [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 829.434815] env[62585]: DEBUG nova.virt.hardware [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 829.435286] env[62585]: DEBUG nova.virt.hardware [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 829.435286] env[62585]: DEBUG nova.virt.hardware [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 829.435286] env[62585]: DEBUG nova.virt.hardware [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 829.435467] env[62585]: DEBUG nova.virt.hardware [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 829.436731] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06332c7d-50b3-4e84-bb67-a9f1eeb713fd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.445075] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6f5cde-20ec-4927-900c-3d0b85d4532f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.569794] env[62585]: DEBUG nova.network.neutron [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.667344] env[62585]: DEBUG nova.network.neutron [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.798065] env[62585]: DEBUG nova.network.neutron [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Updating instance_info_cache with network_info: [{"id": "597e2cc3-d043-4c6b-a254-2d9838a1ebf9", "address": "fa:16:3e:01:b1:72", "network": {"id": "8d1518c7-e8f5-4297-9bb4-b9b4a16a1481", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454559249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c76fd292d84bbe97c7221e75831fbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap597e2cc3-d0", "ovs_interfaceid": "597e2cc3-d043-4c6b-a254-2d9838a1ebf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.844936] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b7a734-8b52-419f-8483-0a649694f7db {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.854110] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f308443a-0de5-4918-af25-9841daf23c03 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.882062] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ddf856c-7a4e-43e6-99bb-3cf8b16bb4bf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.889834] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364f191b-503b-4b6a-be15-4f359f8f7be4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.902816] env[62585]: DEBUG nova.compute.provider_tree [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.055940] env[62585]: DEBUG nova.network.neutron [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.100858] 
env[62585]: DEBUG nova.compute.manager [req-9b5c6c08-73e3-4b0b-97bc-c82623349b09 req-3d591265-fdef-4e27-a94b-95cdf42512c5 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Received event network-vif-plugged-bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 830.101093] env[62585]: DEBUG oslo_concurrency.lockutils [req-9b5c6c08-73e3-4b0b-97bc-c82623349b09 req-3d591265-fdef-4e27-a94b-95cdf42512c5 service nova] Acquiring lock "6057e13b-71df-458d-b6ed-c139a8c57836-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.101299] env[62585]: DEBUG oslo_concurrency.lockutils [req-9b5c6c08-73e3-4b0b-97bc-c82623349b09 req-3d591265-fdef-4e27-a94b-95cdf42512c5 service nova] Lock "6057e13b-71df-458d-b6ed-c139a8c57836-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.101459] env[62585]: DEBUG oslo_concurrency.lockutils [req-9b5c6c08-73e3-4b0b-97bc-c82623349b09 req-3d591265-fdef-4e27-a94b-95cdf42512c5 service nova] Lock "6057e13b-71df-458d-b6ed-c139a8c57836-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.101805] env[62585]: DEBUG nova.compute.manager [req-9b5c6c08-73e3-4b0b-97bc-c82623349b09 req-3d591265-fdef-4e27-a94b-95cdf42512c5 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] No waiting events found dispatching network-vif-plugged-bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 830.101805] env[62585]: WARNING nova.compute.manager [req-9b5c6c08-73e3-4b0b-97bc-c82623349b09 req-3d591265-fdef-4e27-a94b-95cdf42512c5 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Received unexpected event network-vif-plugged-bc3d19ab-ba98-4935-9e08-61c5df21be43 for instance with vm_state building and task_state spawning. 
[ 830.215191] env[62585]: DEBUG nova.network.neutron [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Successfully updated port: bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 830.301069] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Releasing lock "refresh_cache-8763a058-b453-4f03-9532-7d7e65efdfb2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.301422] env[62585]: DEBUG nova.compute.manager [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Instance network_info: |[{"id": "597e2cc3-d043-4c6b-a254-2d9838a1ebf9", "address": "fa:16:3e:01:b1:72", "network": {"id": "8d1518c7-e8f5-4297-9bb4-b9b4a16a1481", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454559249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c76fd292d84bbe97c7221e75831fbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap597e2cc3-d0", "ovs_interfaceid": "597e2cc3-d043-4c6b-a254-2d9838a1ebf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 830.301943] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:b1:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '597e2cc3-d043-4c6b-a254-2d9838a1ebf9', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 830.310452] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Creating folder: Project (36c76fd292d84bbe97c7221e75831fbb). Parent ref: group-v293962. 
{{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 830.310713] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d437127-449c-4786-b5c0-ab361814f39d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.323601] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Created folder: Project (36c76fd292d84bbe97c7221e75831fbb) in parent group-v293962. [ 830.323786] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Creating folder: Instances. Parent ref: group-v293988. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 830.324015] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f8244cc-8f16-4424-9164-4b08f9e238fa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.332719] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Created folder: Instances in parent group-v293988. [ 830.332936] env[62585]: DEBUG oslo.service.loopingcall [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 830.333136] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 830.333335] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e16268c0-4b66-4b5b-91f7-375994eb8bff {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.351699] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 830.351699] env[62585]: value = "task-1384696" [ 830.351699] env[62585]: _type = "Task" [ 830.351699] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.359551] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384696, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.406758] env[62585]: DEBUG nova.scheduler.client.report [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 830.463102] env[62585]: DEBUG nova.compute.manager [req-ecfd770a-7861-4aac-85f2-91ef247f2f0f req-14d15afa-d064-46a7-8da3-09c96e5f2c93 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Received event network-changed-597e2cc3-d043-4c6b-a254-2d9838a1ebf9 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 830.463313] env[62585]: DEBUG nova.compute.manager [req-ecfd770a-7861-4aac-85f2-91ef247f2f0f req-14d15afa-d064-46a7-8da3-09c96e5f2c93 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Refreshing instance network info cache due to event network-changed-597e2cc3-d043-4c6b-a254-2d9838a1ebf9. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 830.463540] env[62585]: DEBUG oslo_concurrency.lockutils [req-ecfd770a-7861-4aac-85f2-91ef247f2f0f req-14d15afa-d064-46a7-8da3-09c96e5f2c93 service nova] Acquiring lock "refresh_cache-8763a058-b453-4f03-9532-7d7e65efdfb2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.463685] env[62585]: DEBUG oslo_concurrency.lockutils [req-ecfd770a-7861-4aac-85f2-91ef247f2f0f req-14d15afa-d064-46a7-8da3-09c96e5f2c93 service nova] Acquired lock "refresh_cache-8763a058-b453-4f03-9532-7d7e65efdfb2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.463852] env[62585]: DEBUG nova.network.neutron [req-ecfd770a-7861-4aac-85f2-91ef247f2f0f req-14d15afa-d064-46a7-8da3-09c96e5f2c93 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Refreshing network info cache for port 597e2cc3-d043-4c6b-a254-2d9838a1ebf9 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 830.558206] env[62585]: DEBUG oslo_concurrency.lockutils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "refresh_cache-aed35d7d-f826-4601-aa4e-1d1dccd51d3a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.559179] env[62585]: DEBUG nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 830.559643] env[62585]: DEBUG nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 830.559643] env[62585]: DEBUG nova.network.neutron [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 830.582385] env[62585]: DEBUG nova.network.neutron [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.715438] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.715603] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquired lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.715761] env[62585]: DEBUG nova.network.neutron [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.863574] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384696, 'name': CreateVM_Task, 'duration_secs': 0.304031} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.863574] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 830.873486] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.873590] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.874479] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 830.874479] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ed3e5c8-072d-4ca1-9d86-ec49db9d0a23 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.878912] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 830.878912] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e4b345-7045-a9e2-bb7a-4585e3b0e585" [ 830.878912] env[62585]: _type = "Task" [ 830.878912] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.886615] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e4b345-7045-a9e2-bb7a-4585e3b0e585, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.911593] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.780s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.912230] env[62585]: ERROR nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 52853daf-8aeb-4e00-b8de-4d44b42e6529, please check neutron logs for more information. [ 830.912230] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Traceback (most recent call last): [ 830.912230] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 830.912230] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] self.driver.spawn(context, instance, image_meta, [ 830.912230] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 830.912230] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 830.912230] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 830.912230] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] vm_ref = self.build_virtual_machine(instance, [ 830.912230] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 830.912230] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] vif_infos = vmwarevif.get_vif_info(self._session, [ 830.912230] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 830.912570] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] for vif in network_info: [ 830.912570] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 830.912570] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] return self._sync_wrapper(fn, *args, **kwargs) [ 830.912570] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 830.912570] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] self.wait() [ 830.912570] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 830.912570] 
env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] self[:] = self._gt.wait() [ 830.912570] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 830.912570] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] return self._exit_event.wait() [ 830.912570] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 830.912570] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] current.throw(*self._exc) [ 830.912570] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 830.912570] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] result = function(*args, **kwargs) [ 830.912929] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 830.912929] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] return func(*args, **kwargs) [ 830.912929] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 830.912929] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] raise e [ 830.912929] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 830.912929] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] nwinfo = self.network_api.allocate_for_instance( [ 830.912929] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 830.912929] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] created_port_ids = self._update_ports_for_instance( [ 830.912929] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 830.912929] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] with excutils.save_and_reraise_exception(): [ 830.912929] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 830.912929] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] self.force_reraise() [ 830.912929] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 830.913316] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] raise self.value [ 830.913316] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 830.913316] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] updated_port = self._update_port( [ 830.913316] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 830.913316] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] _ensure_no_port_binding_failure(port) [ 830.913316] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 830.913316] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] raise exception.PortBindingFailed(port_id=port['id']) [ 830.913316] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] nova.exception.PortBindingFailed: Binding failed for port 52853daf-8aeb-4e00-b8de-4d44b42e6529, please check neutron logs for more information. [ 830.913316] env[62585]: ERROR nova.compute.manager [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] [ 830.913316] env[62585]: DEBUG nova.compute.utils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Binding failed for port 52853daf-8aeb-4e00-b8de-4d44b42e6529, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 830.914200] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.657s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.915631] env[62585]: INFO nova.compute.claims [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 830.918424] env[62585]: DEBUG nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Build of instance 6483148a-b53d-46b9-8926-07b628f2ea3b was re-scheduled: Binding failed for port 52853daf-8aeb-4e00-b8de-4d44b42e6529, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 830.919017] env[62585]: DEBUG nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 830.919261] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "refresh_cache-6483148a-b53d-46b9-8926-07b628f2ea3b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.919412] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquired lock "refresh_cache-6483148a-b53d-46b9-8926-07b628f2ea3b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.919707] env[62585]: DEBUG nova.network.neutron [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.085416] env[62585]: DEBUG nova.network.neutron [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.229592] env[62585]: DEBUG nova.network.neutron [req-ecfd770a-7861-4aac-85f2-91ef247f2f0f req-14d15afa-d064-46a7-8da3-09c96e5f2c93 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Updated VIF entry in instance network info cache for port 597e2cc3-d043-4c6b-a254-2d9838a1ebf9. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 831.231107] env[62585]: DEBUG nova.network.neutron [req-ecfd770a-7861-4aac-85f2-91ef247f2f0f req-14d15afa-d064-46a7-8da3-09c96e5f2c93 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Updating instance_info_cache with network_info: [{"id": "597e2cc3-d043-4c6b-a254-2d9838a1ebf9", "address": "fa:16:3e:01:b1:72", "network": {"id": "8d1518c7-e8f5-4297-9bb4-b9b4a16a1481", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454559249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c76fd292d84bbe97c7221e75831fbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap597e2cc3-d0", "ovs_interfaceid": "597e2cc3-d043-4c6b-a254-2d9838a1ebf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.252974] env[62585]: DEBUG nova.network.neutron [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.389034] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e4b345-7045-a9e2-bb7a-4585e3b0e585, 'name': SearchDatastore_Task, 'duration_secs': 0.010567} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.389345] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.389575] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 831.389806] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.389949] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.390141] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 831.390398] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eaf11e02-3a15-4fd6-8307-cd2eefc0f7ab {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.398642] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 831.398642] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 831.399749] env[62585]: DEBUG nova.network.neutron [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updating instance_info_cache with network_info: [{"id": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "address": "fa:16:3e:31:3b:9c", "network": {"id": "8a8daef6-7b2d-44f6-8f2a-5cdf4dfff449", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-240024676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9645866ca8f0433cae30cf5867244ca8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc3d19ab-ba", "ovs_interfaceid": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.400819] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eca29146-760c-45ea-a15e-de7012a22c82 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.405822] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 831.405822] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52710d8c-26f2-cc79-8fb4-b26746974222" [ 831.405822] env[62585]: _type = "Task" [ 831.405822] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.413654] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52710d8c-26f2-cc79-8fb4-b26746974222, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.445975] env[62585]: DEBUG nova.network.neutron [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.551056] env[62585]: DEBUG nova.network.neutron [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.590714] env[62585]: INFO nova.compute.manager [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: aed35d7d-f826-4601-aa4e-1d1dccd51d3a] Took 1.03 seconds to deallocate network for instance. [ 831.733164] env[62585]: DEBUG oslo_concurrency.lockutils [req-ecfd770a-7861-4aac-85f2-91ef247f2f0f req-14d15afa-d064-46a7-8da3-09c96e5f2c93 service nova] Releasing lock "refresh_cache-8763a058-b453-4f03-9532-7d7e65efdfb2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.903940] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Releasing lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.904368] env[62585]: DEBUG nova.compute.manager [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Instance network_info: |[{"id": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "address": "fa:16:3e:31:3b:9c", "network": {"id": "8a8daef6-7b2d-44f6-8f2a-5cdf4dfff449", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-240024676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9645866ca8f0433cae30cf5867244ca8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc3d19ab-ba", "ovs_interfaceid": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 831.904793] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:3b:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9b7e9e55-3210-4fae-9648-d87e76c3d931', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': 'bc3d19ab-ba98-4935-9e08-61c5df21be43', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 831.912263] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Creating folder: Project (9645866ca8f0433cae30cf5867244ca8). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.913028] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bd30fff5-d57c-42dc-830b-dc56ba62a8d5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.924497] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52710d8c-26f2-cc79-8fb4-b26746974222, 'name': SearchDatastore_Task, 'duration_secs': 0.008236} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.929187] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03298d1c-bd3e-4760-8f1e-9588d69ad064 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.931354] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Created folder: Project (9645866ca8f0433cae30cf5867244ca8) in parent group-v293962. [ 831.931585] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Creating folder: Instances. Parent ref: group-v293991. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.932038] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dcb8790e-e647-4e06-a3c3-07de84d26f2f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.936970] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 831.936970] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c2af47-e8c8-4f45-8041-2db3560ca9d2" [ 831.936970] env[62585]: _type = "Task" [ 831.936970] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.941851] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Created folder: Instances in parent group-v293991. [ 831.942105] env[62585]: DEBUG oslo.service.loopingcall [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 831.942649] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 831.942888] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0db4d507-d5a7-45f2-ad8b-364d1122e08b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.964436] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c2af47-e8c8-4f45-8041-2db3560ca9d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.970155] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.970155] env[62585]: value = "task-1384699" [ 831.970155] env[62585]: _type = "Task" [ 831.970155] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.977072] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384699, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.053756] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Releasing lock "refresh_cache-6483148a-b53d-46b9-8926-07b628f2ea3b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.054015] env[62585]: DEBUG nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 832.054212] env[62585]: DEBUG nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 832.054379] env[62585]: DEBUG nova.network.neutron [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 832.077244] env[62585]: DEBUG nova.network.neutron [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.103955] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d620fff-2021-4e9a-a281-3ed8030d83f9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.111173] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2971e650-fd60-44b6-9057-05724d02cc04 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.148083] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d711e796-7ef6-400c-bbc3-fbfbd76103df {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.154074] env[62585]: DEBUG nova.compute.manager [req-88c66a36-d81a-4971-a3bb-0b82d48a84c6 req-366c5008-aaf6-476f-91ef-59835da3ffdd service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Received event network-changed-bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 832.154074] env[62585]: DEBUG nova.compute.manager [req-88c66a36-d81a-4971-a3bb-0b82d48a84c6 req-366c5008-aaf6-476f-91ef-59835da3ffdd service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Refreshing instance network info cache due to event network-changed-bc3d19ab-ba98-4935-9e08-61c5df21be43. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 832.154074] env[62585]: DEBUG oslo_concurrency.lockutils [req-88c66a36-d81a-4971-a3bb-0b82d48a84c6 req-366c5008-aaf6-476f-91ef-59835da3ffdd service nova] Acquiring lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.154074] env[62585]: DEBUG oslo_concurrency.lockutils [req-88c66a36-d81a-4971-a3bb-0b82d48a84c6 req-366c5008-aaf6-476f-91ef-59835da3ffdd service nova] Acquired lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.154074] env[62585]: DEBUG nova.network.neutron [req-88c66a36-d81a-4971-a3bb-0b82d48a84c6 req-366c5008-aaf6-476f-91ef-59835da3ffdd service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Refreshing network info cache for port bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 832.160685] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cacdabd-a1fe-44e6-bfc0-b161f72942c8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.176892] env[62585]: DEBUG nova.compute.provider_tree [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.450339] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': 
session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c2af47-e8c8-4f45-8041-2db3560ca9d2, 'name': SearchDatastore_Task, 'duration_secs': 0.00933} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.450339] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.450339] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 8763a058-b453-4f03-9532-7d7e65efdfb2/8763a058-b453-4f03-9532-7d7e65efdfb2.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 832.450339] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7db139bc-0b9a-4e66-99d3-867bde1da4e6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.455091] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 832.455091] env[62585]: value = "task-1384700" [ 832.455091] env[62585]: _type = "Task" [ 832.455091] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.462746] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384700, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.478906] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384699, 'name': CreateVM_Task, 'duration_secs': 0.288787} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.479091] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 832.479760] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.479941] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.480282] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 832.480532] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ffd5cda-2486-427c-9d1d-f05e3a103429 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.484491] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 832.484491] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52006710-beb5-50ce-164e-d95f7b869f3a" [ 832.484491] env[62585]: _type = "Task" [ 832.484491] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.491903] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52006710-beb5-50ce-164e-d95f7b869f3a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.579820] env[62585]: DEBUG nova.network.neutron [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.635918] env[62585]: INFO nova.scheduler.client.report [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Deleted allocations for instance aed35d7d-f826-4601-aa4e-1d1dccd51d3a [ 832.680213] env[62585]: DEBUG nova.scheduler.client.report [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 832.965092] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384700, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.449112} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.965325] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 8763a058-b453-4f03-9532-7d7e65efdfb2/8763a058-b453-4f03-9532-7d7e65efdfb2.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 832.965539] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 832.965785] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-881a9916-49d9-427c-9059-02cc1b334729 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.973170] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 832.973170] env[62585]: value = "task-1384701" [ 832.973170] env[62585]: _type = "Task" [ 832.973170] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.981973] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384701, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.997590] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52006710-beb5-50ce-164e-d95f7b869f3a, 'name': SearchDatastore_Task, 'duration_secs': 0.007623} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.998915] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.999179] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 832.999431] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.999732] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.999732] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.000007] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd1a6982-fe9f-4a16-a160-0a460623739b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.008722] env[62585]: DEBUG nova.network.neutron [req-88c66a36-d81a-4971-a3bb-0b82d48a84c6 req-366c5008-aaf6-476f-91ef-59835da3ffdd service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updated VIF entry in instance network info cache for port 
bc3d19ab-ba98-4935-9e08-61c5df21be43. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 833.009113] env[62585]: DEBUG nova.network.neutron [req-88c66a36-d81a-4971-a3bb-0b82d48a84c6 req-366c5008-aaf6-476f-91ef-59835da3ffdd service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updating instance_info_cache with network_info: [{"id": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "address": "fa:16:3e:31:3b:9c", "network": {"id": "8a8daef6-7b2d-44f6-8f2a-5cdf4dfff449", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-240024676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9645866ca8f0433cae30cf5867244ca8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc3d19ab-ba", "ovs_interfaceid": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.011685] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.011859] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 833.012880] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d87e12d4-1688-46ce-8c91-8eb238333283 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.018089] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 833.018089] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c2b326-dd14-acab-02e5-bd0dcc23298e" [ 833.018089] env[62585]: _type = "Task" [ 833.018089] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.025664] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c2b326-dd14-acab-02e5-bd0dcc23298e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.084760] env[62585]: INFO nova.compute.manager [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 6483148a-b53d-46b9-8926-07b628f2ea3b] Took 1.03 seconds to deallocate network for instance. [ 833.146950] env[62585]: DEBUG oslo_concurrency.lockutils [None req-389029aa-8016-4796-b8d2-f6f420bcdebc tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "aed35d7d-f826-4601-aa4e-1d1dccd51d3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.696s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.186213] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.272s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.186747] env[62585]: DEBUG nova.compute.manager [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 833.189265] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.207s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.190649] env[62585]: INFO nova.compute.claims [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.483014] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384701, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063041} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.483164] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 833.483918] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4bb777-80f1-4f80-a891-b895a93de388 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.506337] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 8763a058-b453-4f03-9532-7d7e65efdfb2/8763a058-b453-4f03-9532-7d7e65efdfb2.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 833.506617] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30196b01-4351-4d95-ae2b-cd6d20b3dc85 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.521109] env[62585]: DEBUG oslo_concurrency.lockutils [req-88c66a36-d81a-4971-a3bb-0b82d48a84c6 req-366c5008-aaf6-476f-91ef-59835da3ffdd service nova] Releasing lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.530836] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c2b326-dd14-acab-02e5-bd0dcc23298e, 'name': SearchDatastore_Task, 'duration_secs': 0.00821} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.532600] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 833.532600] env[62585]: value = "task-1384702" [ 833.532600] env[62585]: _type = "Task" [ 833.532600] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.532806] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46d46bf0-f18e-40d9-a432-f8fc920fd39a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.540306] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 833.540306] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]525157a2-9200-6e9b-39e1-7c9a0561a76c" [ 833.540306] env[62585]: _type = "Task" [ 833.540306] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.543372] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384702, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.550377] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]525157a2-9200-6e9b-39e1-7c9a0561a76c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.649488] env[62585]: DEBUG nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 833.695459] env[62585]: DEBUG nova.compute.utils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 833.699206] env[62585]: DEBUG nova.compute.manager [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 833.699294] env[62585]: DEBUG nova.network.neutron [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 833.761550] env[62585]: DEBUG nova.policy [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b596d143eaf450e97e982b0d4ff1b50', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34c6f21d288e47dd94ccbe12526fe4e8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 834.047492] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384702, 'name': ReconfigVM_Task, 'duration_secs': 0.261508} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.051125] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 8763a058-b453-4f03-9532-7d7e65efdfb2/8763a058-b453-4f03-9532-7d7e65efdfb2.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.051887] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4f50e74-5a65-4557-a327-d18ecc9ac1e0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.054786] env[62585]: DEBUG nova.network.neutron [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Successfully created port: c32c8966-edf9-44a6-9263-00c85e124ab0 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 834.061530] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]525157a2-9200-6e9b-39e1-7c9a0561a76c, 'name': SearchDatastore_Task, 'duration_secs': 0.009175} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.062708] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.062963] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 6057e13b-71df-458d-b6ed-c139a8c57836/6057e13b-71df-458d-b6ed-c139a8c57836.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.063282] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 834.063282] env[62585]: value = "task-1384703" [ 834.063282] env[62585]: _type = "Task" [ 834.063282] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.063530] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a44eee04-9a4d-4846-a912-83e563f52769 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.072766] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384703, 'name': Rename_Task} progress is 10%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.073963] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 834.073963] env[62585]: value = "task-1384704" [ 834.073963] env[62585]: _type = "Task" [ 834.073963] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.130028] env[62585]: INFO nova.scheduler.client.report [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Deleted allocations for instance 6483148a-b53d-46b9-8926-07b628f2ea3b [ 834.180816] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.198323] env[62585]: DEBUG nova.compute.manager [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 834.404689] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83afcc86-45b0-4311-85c3-9c0642654c2c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.413254] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e34ae7-e386-4c8f-b5a7-f011721dbc4b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.447677] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa5fc36-71c5-49b2-912b-3e275f8276e4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.455999] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d43451-86b9-42da-9ed4-4b980184f0ad {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.469702] env[62585]: DEBUG nova.compute.provider_tree [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.575859] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384703, 'name': Rename_Task, 'duration_secs': 0.144258} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.578713] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 834.578943] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67fb2161-7e10-426f-bd7a-c2e59a0a1f39 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.584704] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384704, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473734} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.585807] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 6057e13b-71df-458d-b6ed-c139a8c57836/6057e13b-71df-458d-b6ed-c139a8c57836.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 834.586033] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 834.586314] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 834.586314] env[62585]: value = "task-1384705" [ 834.586314] env[62585]: _type = "Task" [ 834.586314] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.586523] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a49cd46d-2cf7-407b-8494-8f1b30a32649 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.595226] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384705, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.596388] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 834.596388] env[62585]: value = "task-1384706" [ 834.596388] env[62585]: _type = "Task" [ 834.596388] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.603564] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384706, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.640744] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2623696-b6e4-4dd5-87e9-7e624f29e803 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "6483148a-b53d-46b9-8926-07b628f2ea3b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.141s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.974267] env[62585]: DEBUG nova.scheduler.client.report [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 835.097659] env[62585]: DEBUG oslo_vmware.api [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384705, 'name': PowerOnVM_Task, 'duration_secs': 0.454938} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.100675] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 835.100889] env[62585]: INFO nova.compute.manager [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Took 8.01 seconds to spawn the instance on the hypervisor. [ 835.101079] env[62585]: DEBUG nova.compute.manager [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 835.101789] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e27531e-5e59-492b-8cc8-47d34710d18e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.108690] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384706, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065432} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.110115] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.113589] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361e8b89-43ee-45ff-843b-3697268021c0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.134635] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 6057e13b-71df-458d-b6ed-c139a8c57836/6057e13b-71df-458d-b6ed-c139a8c57836.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.134895] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1e94675-ea0f-4c43-be98-610f7522eff6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.149308] env[62585]: DEBUG nova.compute.manager [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 835.157128] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 835.157128] env[62585]: value = "task-1384707" [ 835.157128] env[62585]: _type = "Task" [ 835.157128] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.165346] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384707, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.208932] env[62585]: DEBUG nova.compute.manager [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 835.230027] env[62585]: DEBUG nova.virt.hardware [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 835.230282] env[62585]: DEBUG nova.virt.hardware [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 835.230435] env[62585]: DEBUG nova.virt.hardware [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 835.230617] env[62585]: DEBUG nova.virt.hardware [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 835.230772] env[62585]: DEBUG nova.virt.hardware [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 835.230971] env[62585]: DEBUG nova.virt.hardware [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 835.231206] env[62585]: DEBUG nova.virt.hardware [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 835.231366] env[62585]: DEBUG nova.virt.hardware [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 835.231528] env[62585]: DEBUG nova.virt.hardware [None 
req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 835.231759] env[62585]: DEBUG nova.virt.hardware [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 835.231876] env[62585]: DEBUG nova.virt.hardware [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 835.232790] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7c0377-2e5a-4ec4-abd8-bdb8aeb81483 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.241875] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510b8e67-d662-47cf-b078-8bc612a71e5e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.334058] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "a634a80e-d90a-4ce3-8233-75657a7754be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.334297] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "a634a80e-d90a-4ce3-8233-75657a7754be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.479974] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.291s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.480521] env[62585]: DEBUG nova.compute.manager [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 835.488855] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.126s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.632650] env[62585]: INFO nova.compute.manager [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Took 26.52 seconds to build instance. [ 835.669486] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384707, 'name': ReconfigVM_Task, 'duration_secs': 0.258623} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.669765] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 6057e13b-71df-458d-b6ed-c139a8c57836/6057e13b-71df-458d-b6ed-c139a8c57836.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 835.670512] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8022d09f-3695-4d6e-964a-523872fedaf1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.676978] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 835.676978] env[62585]: value = "task-1384708" [ 835.676978] env[62585]: _type = "Task" [ 835.676978] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.677918] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.687666] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384708, 'name': Rename_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.806017] env[62585]: DEBUG nova.compute.manager [req-788f9408-de12-45b3-88a4-049e6421be54 req-1d5158e2-e81c-4f47-8b9f-4c36ec7842cc service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Received event network-vif-plugged-c32c8966-edf9-44a6-9263-00c85e124ab0 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 835.806382] env[62585]: DEBUG oslo_concurrency.lockutils [req-788f9408-de12-45b3-88a4-049e6421be54 req-1d5158e2-e81c-4f47-8b9f-4c36ec7842cc service nova] Acquiring lock "abf4a205-fcee-46e4-85b6-10a452cc0312-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.806658] env[62585]: DEBUG oslo_concurrency.lockutils [req-788f9408-de12-45b3-88a4-049e6421be54 req-1d5158e2-e81c-4f47-8b9f-4c36ec7842cc service nova] Lock "abf4a205-fcee-46e4-85b6-10a452cc0312-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.806926] env[62585]: DEBUG oslo_concurrency.lockutils [req-788f9408-de12-45b3-88a4-049e6421be54 req-1d5158e2-e81c-4f47-8b9f-4c36ec7842cc service nova] Lock "abf4a205-fcee-46e4-85b6-10a452cc0312-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.806998] env[62585]: DEBUG nova.compute.manager [req-788f9408-de12-45b3-88a4-049e6421be54 req-1d5158e2-e81c-4f47-8b9f-4c36ec7842cc service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] No waiting events found dispatching network-vif-plugged-c32c8966-edf9-44a6-9263-00c85e124ab0 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 835.807740] env[62585]: WARNING nova.compute.manager [req-788f9408-de12-45b3-88a4-049e6421be54 req-1d5158e2-e81c-4f47-8b9f-4c36ec7842cc service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Received unexpected event network-vif-plugged-c32c8966-edf9-44a6-9263-00c85e124ab0 for instance with vm_state building and task_state spawning. [ 835.994442] env[62585]: DEBUG nova.compute.utils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 836.004019] env[62585]: DEBUG nova.compute.manager [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 836.004019] env[62585]: DEBUG nova.network.neutron [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 836.075694] env[62585]: DEBUG nova.policy [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01befe1db3684d60943c74da2c2c9fdc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f00751679b29472e9ab92c9e48a99925', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 836.135777] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bcae34ef-ea27-46c4-bb73-acf4f4002364 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "8763a058-b453-4f03-9532-7d7e65efdfb2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.089s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.155296] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f74cab7-84ed-43ff-b03f-61d7c359fc85 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.163387] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33299457-380e-4ac9-ab6e-8c95561ab923 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.203031] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b40196-fe64-4e73-9c29-84fba2e2d7cc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.212947] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384708, 'name': Rename_Task, 'duration_secs': 0.150092} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.212947] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 836.214924] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9459976d-25de-437e-9086-084e51b077ea {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.218754] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d02ba9f2-c362-44d0-948f-06c05956eeb0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.235539] env[62585]: DEBUG nova.compute.provider_tree [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.237139] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 836.237139] env[62585]: value = "task-1384709" [ 836.237139] env[62585]: _type = "Task" [ 836.237139] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.248187] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384709, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.299453] env[62585]: DEBUG nova.compute.manager [req-9a397dc6-8588-4f41-a0e5-3f58e453fd08 req-d8b7909d-6e4d-492f-a691-566cc0b60f28 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Received event network-changed-597e2cc3-d043-4c6b-a254-2d9838a1ebf9 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 836.299652] env[62585]: DEBUG nova.compute.manager [req-9a397dc6-8588-4f41-a0e5-3f58e453fd08 req-d8b7909d-6e4d-492f-a691-566cc0b60f28 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Refreshing instance network info cache due to event network-changed-597e2cc3-d043-4c6b-a254-2d9838a1ebf9. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 836.299990] env[62585]: DEBUG oslo_concurrency.lockutils [req-9a397dc6-8588-4f41-a0e5-3f58e453fd08 req-d8b7909d-6e4d-492f-a691-566cc0b60f28 service nova] Acquiring lock "refresh_cache-8763a058-b453-4f03-9532-7d7e65efdfb2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.300169] env[62585]: DEBUG oslo_concurrency.lockutils [req-9a397dc6-8588-4f41-a0e5-3f58e453fd08 req-d8b7909d-6e4d-492f-a691-566cc0b60f28 service nova] Acquired lock "refresh_cache-8763a058-b453-4f03-9532-7d7e65efdfb2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.300484] env[62585]: DEBUG nova.network.neutron [req-9a397dc6-8588-4f41-a0e5-3f58e453fd08 req-d8b7909d-6e4d-492f-a691-566cc0b60f28 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Refreshing network info cache for port 597e2cc3-d043-4c6b-a254-2d9838a1ebf9 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 836.488098] env[62585]: DEBUG nova.network.neutron [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Successfully updated port: c32c8966-edf9-44a6-9263-00c85e124ab0 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 836.501094] env[62585]: DEBUG nova.compute.manager [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 836.582682] env[62585]: DEBUG nova.network.neutron [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Successfully created port: 812678f9-ea0f-4419-9b6b-98690022e9cc {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 836.637464] env[62585]: DEBUG nova.compute.manager [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 836.738453] env[62585]: DEBUG nova.scheduler.client.report [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 836.751321] env[62585]: DEBUG oslo_vmware.api [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384709, 'name': PowerOnVM_Task, 'duration_secs': 0.457562} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.751578] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 836.751777] env[62585]: INFO nova.compute.manager [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Took 7.35 seconds to spawn the instance on the hypervisor. 
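Note: the records above repeatedly show the driver creating a vCenter task (SearchDatastore_Task, CopyVirtualDisk_Task, Rename_Task, ExtendVirtualDisk_Task, PowerOnVM_Task) and then polling it via oslo_vmware's wait_for_task/_poll_task until it logs "progress is N%" and finally "completed successfully" with a duration_secs value. The snippet below is a minimal, self-contained Python sketch of that poll-until-complete pattern only; it is not oslo.vmware's actual implementation, and the names poll_until_complete, get_task_state, TaskFailed, the (state, info) contract, and the poll interval are assumptions made for the illustration.

# Minimal sketch of the poll-until-complete pattern visible in the log
# (wait_for_task -> _poll_task -> "progress is N%" -> "completed successfully").
# All names here are hypothetical; this is NOT oslo.vmware's code.
import time


class TaskFailed(Exception):
    """Raised when the remote task finishes in an error state (assumed)."""


def poll_until_complete(get_task_state, poll_interval=0.5, timeout=300.0):
    """Poll a remote task until it succeeds, fails, or times out.

    get_task_state is a caller-supplied callable returning a
    (state, info) tuple, where state is 'running', 'success' or
    'error' -- an assumed contract mirroring the task states that
    the log reports while waiting on vCenter tasks.
    """
    deadline = time.monotonic() + timeout
    while True:
        state, info = get_task_state()
        if state == 'success':
            return info                      # e.g. the task result
        if state == 'error':
            raise TaskFailed(str(info))      # surface the fault to the caller
        if time.monotonic() > deadline:
            raise TimeoutError('task did not complete in time')
        # Still running: report progress and sleep, like "progress is 33%."
        print(f'task progress is {info}%')
        time.sleep(poll_interval)


if __name__ == '__main__':
    # Fake task that reports 0%, 50%, then success, to exercise the loop.
    states = iter([('running', 0), ('running', 50), ('success', 'done')])
    print(poll_until_complete(lambda: next(states), poll_interval=0.01))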
[ 836.751949] env[62585]: DEBUG nova.compute.manager [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 836.752720] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ab0471-1ea9-4fcc-8233-013e7a40782c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.992050] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.992260] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquired lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.992374] env[62585]: DEBUG nova.network.neutron [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 837.162241] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.192036] env[62585]: DEBUG nova.network.neutron [req-9a397dc6-8588-4f41-a0e5-3f58e453fd08 req-d8b7909d-6e4d-492f-a691-566cc0b60f28 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Updated VIF entry in instance network info cache for port 597e2cc3-d043-4c6b-a254-2d9838a1ebf9. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 837.192415] env[62585]: DEBUG nova.network.neutron [req-9a397dc6-8588-4f41-a0e5-3f58e453fd08 req-d8b7909d-6e4d-492f-a691-566cc0b60f28 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Updating instance_info_cache with network_info: [{"id": "597e2cc3-d043-4c6b-a254-2d9838a1ebf9", "address": "fa:16:3e:01:b1:72", "network": {"id": "8d1518c7-e8f5-4297-9bb4-b9b4a16a1481", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454559249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c76fd292d84bbe97c7221e75831fbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap597e2cc3-d0", "ovs_interfaceid": "597e2cc3-d043-4c6b-a254-2d9838a1ebf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.245175] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.758s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.245175] env[62585]: ERROR nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6c3726d1-8ffe-420d-9985-5ef0a82289c4, please check neutron logs for more information. 
[ 837.245175] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Traceback (most recent call last): [ 837.245175] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 837.245175] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] self.driver.spawn(context, instance, image_meta, [ 837.245175] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 837.245175] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] self._vmops.spawn(context, instance, image_meta, injected_files, [ 837.245175] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 837.245175] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] vm_ref = self.build_virtual_machine(instance, [ 837.245726] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 837.245726] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] vif_infos = vmwarevif.get_vif_info(self._session, [ 837.245726] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 837.245726] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] for vif in network_info: [ 837.245726] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 837.245726] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] return self._sync_wrapper(fn, *args, **kwargs) [ 837.245726] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 837.245726] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] self.wait() [ 837.245726] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 837.245726] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] self[:] = self._gt.wait() [ 837.245726] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 837.245726] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] return self._exit_event.wait() [ 837.245726] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 837.246055] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] current.throw(*self._exc) [ 837.246055] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
837.246055] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] result = function(*args, **kwargs) [ 837.246055] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 837.246055] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] return func(*args, **kwargs) [ 837.246055] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 837.246055] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] raise e [ 837.246055] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 837.246055] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] nwinfo = self.network_api.allocate_for_instance( [ 837.246055] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 837.246055] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] created_port_ids = self._update_ports_for_instance( [ 837.246055] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 837.246055] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] with excutils.save_and_reraise_exception(): [ 837.246400] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 837.246400] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] self.force_reraise() [ 837.246400] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 837.246400] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] raise self.value [ 837.246400] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 837.246400] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] updated_port = self._update_port( [ 837.246400] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 837.246400] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] _ensure_no_port_binding_failure(port) [ 837.246400] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 837.246400] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] raise exception.PortBindingFailed(port_id=port['id']) [ 837.246400] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] nova.exception.PortBindingFailed: Binding failed for 
port 6c3726d1-8ffe-420d-9985-5ef0a82289c4, please check neutron logs for more information. [ 837.246400] env[62585]: ERROR nova.compute.manager [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] [ 837.246726] env[62585]: DEBUG nova.compute.utils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Binding failed for port 6c3726d1-8ffe-420d-9985-5ef0a82289c4, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 837.247413] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.328s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.249403] env[62585]: INFO nova.compute.claims [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 837.253086] env[62585]: DEBUG nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Build of instance e4373e2a-cc21-41b7-be28-9b140ab43247 was re-scheduled: Binding failed for port 6c3726d1-8ffe-420d-9985-5ef0a82289c4, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 837.253355] env[62585]: DEBUG nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 837.253760] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "refresh_cache-e4373e2a-cc21-41b7-be28-9b140ab43247" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.253760] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock "refresh_cache-e4373e2a-cc21-41b7-be28-9b140ab43247" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.253868] env[62585]: DEBUG nova.network.neutron [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 837.277592] env[62585]: INFO nova.compute.manager [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Took 26.14 seconds to build instance. [ 837.518025] env[62585]: DEBUG nova.compute.manager [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 837.542977] env[62585]: DEBUG nova.virt.hardware [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 837.543250] env[62585]: DEBUG nova.virt.hardware [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 837.543407] env[62585]: DEBUG nova.virt.hardware [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.543586] env[62585]: DEBUG nova.virt.hardware [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 837.543731] env[62585]: DEBUG nova.virt.hardware [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.543872] env[62585]: DEBUG nova.virt.hardware [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 837.544088] env[62585]: DEBUG nova.virt.hardware [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 837.544252] env[62585]: DEBUG nova.virt.hardware [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 837.544474] env[62585]: DEBUG nova.virt.hardware [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] 
Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 837.544657] env[62585]: DEBUG nova.virt.hardware [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 837.544735] env[62585]: DEBUG nova.virt.hardware [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 837.545638] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05cecc6-786d-46b9-a53c-4d81464e6d28 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.549065] env[62585]: DEBUG nova.network.neutron [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.556403] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973b2562-8afa-4d45-8682-1caa28771042 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.695347] env[62585]: DEBUG oslo_concurrency.lockutils [req-9a397dc6-8588-4f41-a0e5-3f58e453fd08 req-d8b7909d-6e4d-492f-a691-566cc0b60f28 service nova] Releasing lock "refresh_cache-8763a058-b453-4f03-9532-7d7e65efdfb2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.701323] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "70ac6289-2f14-4fb0-a811-97d76cafc532" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.705021] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "70ac6289-2f14-4fb0-a811-97d76cafc532" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.783042] env[62585]: DEBUG oslo_concurrency.lockutils [None req-105a22c9-3936-4d1a-a04c-c23dd98ab9e6 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "6057e13b-71df-458d-b6ed-c139a8c57836" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.907s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.787218] env[62585]: DEBUG nova.network.neutron [None 
req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.791386] env[62585]: DEBUG nova.network.neutron [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Updating instance_info_cache with network_info: [{"id": "c32c8966-edf9-44a6-9263-00c85e124ab0", "address": "fa:16:3e:9d:f5:11", "network": {"id": "2b85c6b0-fc8f-4275-94c8-9262d8ea21cd", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-609771769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34c6f21d288e47dd94ccbe12526fe4e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc32c8966-ed", "ovs_interfaceid": "c32c8966-edf9-44a6-9263-00c85e124ab0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.994226] env[62585]: DEBUG nova.network.neutron [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.270230] env[62585]: DEBUG nova.compute.manager [req-de1de14c-1a8c-4ee6-bb9d-5bf218536d4d req-9f677878-1643-4f66-9a53-e570402b5fa4 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Received event network-changed-bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.270534] env[62585]: DEBUG nova.compute.manager [req-de1de14c-1a8c-4ee6-bb9d-5bf218536d4d req-9f677878-1643-4f66-9a53-e570402b5fa4 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Refreshing instance network info cache due to event network-changed-bc3d19ab-ba98-4935-9e08-61c5df21be43. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 838.271278] env[62585]: DEBUG oslo_concurrency.lockutils [req-de1de14c-1a8c-4ee6-bb9d-5bf218536d4d req-9f677878-1643-4f66-9a53-e570402b5fa4 service nova] Acquiring lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.271278] env[62585]: DEBUG oslo_concurrency.lockutils [req-de1de14c-1a8c-4ee6-bb9d-5bf218536d4d req-9f677878-1643-4f66-9a53-e570402b5fa4 service nova] Acquired lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.271278] env[62585]: DEBUG nova.network.neutron [req-de1de14c-1a8c-4ee6-bb9d-5bf218536d4d req-9f677878-1643-4f66-9a53-e570402b5fa4 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Refreshing network info cache for port bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 838.285920] env[62585]: DEBUG nova.compute.manager [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 838.295998] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Releasing lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.296313] env[62585]: DEBUG nova.compute.manager [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Instance network_info: |[{"id": "c32c8966-edf9-44a6-9263-00c85e124ab0", "address": "fa:16:3e:9d:f5:11", "network": {"id": "2b85c6b0-fc8f-4275-94c8-9262d8ea21cd", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-609771769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34c6f21d288e47dd94ccbe12526fe4e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc32c8966-ed", "ovs_interfaceid": "c32c8966-edf9-44a6-9263-00c85e124ab0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 838.296712] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 
tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:f5:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98e21102-8954-4f6f-b1e6-5d764a53aa22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c32c8966-edf9-44a6-9263-00c85e124ab0', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 838.306577] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Creating folder: Project (34c6f21d288e47dd94ccbe12526fe4e8). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.310214] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-387228c7-a290-4434-86dd-89cfc8b2f109 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.322553] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Created folder: Project (34c6f21d288e47dd94ccbe12526fe4e8) in parent group-v293962. [ 838.322731] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Creating folder: Instances. Parent ref: group-v293994. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.325823] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b076fa1b-2295-4ab8-9cdd-27249107c7a1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.336252] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Created folder: Instances in parent group-v293994. [ 838.336499] env[62585]: DEBUG oslo.service.loopingcall [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.336690] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 838.337367] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5666a42-b048-47a5-bb80-10ab850ac6bc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.364348] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 838.364348] env[62585]: value = "task-1384712" [ 838.364348] env[62585]: _type = "Task" [ 838.364348] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.372128] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384712, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.411091] env[62585]: DEBUG nova.compute.manager [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Received event network-changed-c32c8966-edf9-44a6-9263-00c85e124ab0 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.411755] env[62585]: DEBUG nova.compute.manager [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Refreshing instance network info cache due to event network-changed-c32c8966-edf9-44a6-9263-00c85e124ab0. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 838.412190] env[62585]: DEBUG oslo_concurrency.lockutils [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] Acquiring lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.412430] env[62585]: DEBUG oslo_concurrency.lockutils [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] Acquired lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.412533] env[62585]: DEBUG nova.network.neutron [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Refreshing network info cache for port c32c8966-edf9-44a6-9263-00c85e124ab0 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 838.426714] env[62585]: DEBUG nova.network.neutron [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Successfully updated port: 812678f9-ea0f-4419-9b6b-98690022e9cc {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 838.496674] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4a0415-bd83-44dd-9d69-f3ef819a3b2b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.499778] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "refresh_cache-e4373e2a-cc21-41b7-be28-9b140ab43247" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.499997] env[62585]: DEBUG nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 838.500195] env[62585]: DEBUG nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 838.500362] env[62585]: DEBUG nova.network.neutron [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 838.507164] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ee9837-bb38-480a-be0f-7b13460d83ef {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.540128] env[62585]: DEBUG nova.network.neutron [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.540128] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e6c661-4422-4cea-92b9-990d2a842834 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.551136] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b933c2f7-10e6-4d94-9b72-1d003f709f78 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.566498] env[62585]: DEBUG nova.compute.provider_tree [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.804933] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.876328] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384712, 'name': CreateVM_Task, 'duration_secs': 0.350077} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.878678] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 838.879475] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.879653] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.879986] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 838.880518] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-717265ae-6f7e-40bd-b351-4e1d7d7c49da {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.884879] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 838.884879] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52984354-2692-732d-01b2-2e09e1659155" [ 838.884879] env[62585]: _type = "Task" [ 838.884879] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.892378] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52984354-2692-732d-01b2-2e09e1659155, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.928991] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "refresh_cache-679380d4-5b96-4c30-bac9-f7163f19c609" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.928991] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired lock "refresh_cache-679380d4-5b96-4c30-bac9-f7163f19c609" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.928991] env[62585]: DEBUG nova.network.neutron [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 839.046679] env[62585]: DEBUG nova.network.neutron [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.070900] env[62585]: DEBUG nova.scheduler.client.report [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 839.080488] env[62585]: DEBUG nova.network.neutron [req-de1de14c-1a8c-4ee6-bb9d-5bf218536d4d req-9f677878-1643-4f66-9a53-e570402b5fa4 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updated VIF entry in instance network info cache for port bc3d19ab-ba98-4935-9e08-61c5df21be43. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 839.080852] env[62585]: DEBUG nova.network.neutron [req-de1de14c-1a8c-4ee6-bb9d-5bf218536d4d req-9f677878-1643-4f66-9a53-e570402b5fa4 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updating instance_info_cache with network_info: [{"id": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "address": "fa:16:3e:31:3b:9c", "network": {"id": "8a8daef6-7b2d-44f6-8f2a-5cdf4dfff449", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-240024676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9645866ca8f0433cae30cf5867244ca8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc3d19ab-ba", "ovs_interfaceid": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.185580] env[62585]: DEBUG nova.network.neutron [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Updated VIF entry in instance network info cache for port c32c8966-edf9-44a6-9263-00c85e124ab0. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 839.185994] env[62585]: DEBUG nova.network.neutron [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Updating instance_info_cache with network_info: [{"id": "c32c8966-edf9-44a6-9263-00c85e124ab0", "address": "fa:16:3e:9d:f5:11", "network": {"id": "2b85c6b0-fc8f-4275-94c8-9262d8ea21cd", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-609771769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34c6f21d288e47dd94ccbe12526fe4e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc32c8966-ed", "ovs_interfaceid": "c32c8966-edf9-44a6-9263-00c85e124ab0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.396180] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52984354-2692-732d-01b2-2e09e1659155, 'name': SearchDatastore_Task, 'duration_secs': 0.043118} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.396641] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.396896] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.397260] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.397402] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.397584] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 839.397945] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f37cf284-224d-4513-8ebb-7dd921b73dae {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.407016] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 839.407225] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 839.407966] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93e5174a-8781-4258-bbdb-cccb3b6fd393 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.413790] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 839.413790] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b61de2-362f-ba68-9564-93abee7b7c99" [ 839.413790] env[62585]: _type = "Task" [ 839.413790] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.422118] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b61de2-362f-ba68-9564-93abee7b7c99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.537827] env[62585]: DEBUG nova.network.neutron [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 839.549595] env[62585]: INFO nova.compute.manager [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: e4373e2a-cc21-41b7-be28-9b140ab43247] Took 1.05 seconds to deallocate network for instance. [ 839.581547] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.334s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.581772] env[62585]: DEBUG nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 839.585053] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.687s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.585053] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.585249] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62585) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 839.585533] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.129s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.589277] env[62585]: DEBUG oslo_concurrency.lockutils [req-de1de14c-1a8c-4ee6-bb9d-5bf218536d4d req-9f677878-1643-4f66-9a53-e570402b5fa4 service nova] Releasing lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.589380] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91a10ee-a764-46a7-be81-eab0c1e00ded {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.600994] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80d6903-af8d-47a1-9581-3eeb9a16005c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.617378] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b4d88f-a595-4d2a-ac82-db678649f133 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.626042] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cadd4341-f403-4370-a977-9d699a54ba50 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.657171] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181322MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=62585) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 839.657332] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.688865] env[62585]: DEBUG oslo_concurrency.lockutils [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] Releasing lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.689157] env[62585]: DEBUG nova.compute.manager [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Received event network-changed-597e2cc3-d043-4c6b-a254-2d9838a1ebf9 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 839.689325] env[62585]: DEBUG nova.compute.manager [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Refreshing instance network info cache due to event network-changed-597e2cc3-d043-4c6b-a254-2d9838a1ebf9. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 839.689532] env[62585]: DEBUG oslo_concurrency.lockutils [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] Acquiring lock "refresh_cache-8763a058-b453-4f03-9532-7d7e65efdfb2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.689688] env[62585]: DEBUG oslo_concurrency.lockutils [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] Acquired lock "refresh_cache-8763a058-b453-4f03-9532-7d7e65efdfb2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.689979] env[62585]: DEBUG nova.network.neutron [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Refreshing network info cache for port 597e2cc3-d043-4c6b-a254-2d9838a1ebf9 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 839.739025] env[62585]: DEBUG nova.network.neutron [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Updating instance_info_cache with network_info: [{"id": "812678f9-ea0f-4419-9b6b-98690022e9cc", "address": "fa:16:3e:f9:7d:29", "network": {"id": "f73c6c58-29b8-4fb6-a001-94a77e4e6a53", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1579050178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f00751679b29472e9ab92c9e48a99925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap812678f9-ea", "ovs_interfaceid": "812678f9-ea0f-4419-9b6b-98690022e9cc", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.892593] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "b2d2a012-a62f-4237-95c3-d7153d6b223c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.892848] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "b2d2a012-a62f-4237-95c3-d7153d6b223c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.924095] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b61de2-362f-ba68-9564-93abee7b7c99, 'name': SearchDatastore_Task, 'duration_secs': 0.00954} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.925073] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e299620-2213-4cea-88b6-5224ce215044 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.930212] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 839.930212] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]529ddeac-db19-fd64-cfc7-bb9b91afc48b" [ 839.930212] env[62585]: _type = "Task" [ 839.930212] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.937643] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]529ddeac-db19-fd64-cfc7-bb9b91afc48b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.091152] env[62585]: DEBUG nova.compute.utils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 840.091907] env[62585]: DEBUG nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 840.091988] env[62585]: DEBUG nova.network.neutron [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 840.150035] env[62585]: DEBUG nova.policy [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8fd0e110bfc94784b4fe881fb27a48e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10320418f69d4f0e88a3adf2a8245237', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 840.241784] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Releasing lock "refresh_cache-679380d4-5b96-4c30-bac9-f7163f19c609" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.242136] env[62585]: DEBUG nova.compute.manager [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Instance network_info: |[{"id": "812678f9-ea0f-4419-9b6b-98690022e9cc", "address": "fa:16:3e:f9:7d:29", "network": {"id": "f73c6c58-29b8-4fb6-a001-94a77e4e6a53", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1579050178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f00751679b29472e9ab92c9e48a99925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap812678f9-ea", "ovs_interfaceid": "812678f9-ea0f-4419-9b6b-98690022e9cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 840.242581] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:7d:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'812678f9-ea0f-4419-9b6b-98690022e9cc', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 840.253845] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Creating folder: Project (f00751679b29472e9ab92c9e48a99925). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 840.257797] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e4366813-e638-4d1a-aef7-9e687fe72678 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.272108] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Created folder: Project (f00751679b29472e9ab92c9e48a99925) in parent group-v293962. [ 840.272318] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Creating folder: Instances. Parent ref: group-v293997. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 840.275070] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e02456ec-d252-4fd7-9b08-d7a77caae31b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.284811] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Created folder: Instances in parent group-v293997. [ 840.285057] env[62585]: DEBUG oslo.service.loopingcall [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.285258] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 840.285535] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23f4d579-e27c-4784-b50a-06c7a3f627ba {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.301133] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016168e5-676f-4483-9bdb-cdd47f5584bd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.309588] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca0fbba-71c1-4cda-964a-e40b2dc3b8bc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.312821] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 840.312821] env[62585]: value = "task-1384715" [ 840.312821] env[62585]: _type = "Task" [ 840.312821] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.346795] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944b3950-7849-4f6e-a597-479655295af6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.354277] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384715, 'name': CreateVM_Task} progress is 15%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.361467] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acab3b84-66bc-4e6b-9f90-48d33de322d6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.377009] env[62585]: DEBUG nova.compute.provider_tree [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.441062] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]529ddeac-db19-fd64-cfc7-bb9b91afc48b, 'name': SearchDatastore_Task, 'duration_secs': 0.031774} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.441565] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.441619] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] abf4a205-fcee-46e4-85b6-10a452cc0312/abf4a205-fcee-46e4-85b6-10a452cc0312.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 840.441854] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bcd3dd9a-6a6f-4045-bf12-18d44e1bab02 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.448240] env[62585]: DEBUG nova.compute.manager [req-047613a0-a47f-44f9-a894-1e6e4fa7ee4c req-76264dfe-cd9e-40ac-81ee-1ef158b045bc service nova] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Received event network-changed-812678f9-ea0f-4419-9b6b-98690022e9cc {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 840.448439] env[62585]: DEBUG nova.compute.manager [req-047613a0-a47f-44f9-a894-1e6e4fa7ee4c req-76264dfe-cd9e-40ac-81ee-1ef158b045bc service nova] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Refreshing instance network info cache due to 
event network-changed-812678f9-ea0f-4419-9b6b-98690022e9cc. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 840.448711] env[62585]: DEBUG oslo_concurrency.lockutils [req-047613a0-a47f-44f9-a894-1e6e4fa7ee4c req-76264dfe-cd9e-40ac-81ee-1ef158b045bc service nova] Acquiring lock "refresh_cache-679380d4-5b96-4c30-bac9-f7163f19c609" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.449091] env[62585]: DEBUG oslo_concurrency.lockutils [req-047613a0-a47f-44f9-a894-1e6e4fa7ee4c req-76264dfe-cd9e-40ac-81ee-1ef158b045bc service nova] Acquired lock "refresh_cache-679380d4-5b96-4c30-bac9-f7163f19c609" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.449327] env[62585]: DEBUG nova.network.neutron [req-047613a0-a47f-44f9-a894-1e6e4fa7ee4c req-76264dfe-cd9e-40ac-81ee-1ef158b045bc service nova] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Refreshing network info cache for port 812678f9-ea0f-4419-9b6b-98690022e9cc {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 840.453445] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 840.453445] env[62585]: value = "task-1384716" [ 840.453445] env[62585]: _type = "Task" [ 840.453445] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.464337] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384716, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.568772] env[62585]: DEBUG nova.network.neutron [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Successfully created port: 8360c89d-6755-4e11-b3fa-358072fa1c9b {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 840.581413] env[62585]: DEBUG nova.network.neutron [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Updated VIF entry in instance network info cache for port 597e2cc3-d043-4c6b-a254-2d9838a1ebf9. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 840.581806] env[62585]: DEBUG nova.network.neutron [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Updating instance_info_cache with network_info: [{"id": "597e2cc3-d043-4c6b-a254-2d9838a1ebf9", "address": "fa:16:3e:01:b1:72", "network": {"id": "8d1518c7-e8f5-4297-9bb4-b9b4a16a1481", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454559249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c76fd292d84bbe97c7221e75831fbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap597e2cc3-d0", "ovs_interfaceid": "597e2cc3-d043-4c6b-a254-2d9838a1ebf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.591162] env[62585]: INFO nova.scheduler.client.report [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Deleted allocations for instance e4373e2a-cc21-41b7-be28-9b140ab43247 [ 840.600359] env[62585]: DEBUG nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 840.823647] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384715, 'name': CreateVM_Task, 'duration_secs': 0.505179} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.823920] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 840.824448] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.824672] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.824950] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 840.825254] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1223d1d-9e4c-423e-8512-3adce9c5cb78 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.832240] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 840.832240] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5205c3b3-9f9f-d9d8-1342-ffb4374fd86f" [ 840.832240] env[62585]: _type = "Task" [ 840.832240] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.845077] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5205c3b3-9f9f-d9d8-1342-ffb4374fd86f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.882815] env[62585]: DEBUG nova.scheduler.client.report [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 840.967584] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384716, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.084797] env[62585]: DEBUG oslo_concurrency.lockutils [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] Releasing lock "refresh_cache-8763a058-b453-4f03-9532-7d7e65efdfb2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.084797] env[62585]: DEBUG nova.compute.manager [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Received event network-vif-plugged-812678f9-ea0f-4419-9b6b-98690022e9cc {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 841.084973] env[62585]: DEBUG oslo_concurrency.lockutils [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] Acquiring lock "679380d4-5b96-4c30-bac9-f7163f19c609-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.085195] env[62585]: DEBUG oslo_concurrency.lockutils [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] Lock "679380d4-5b96-4c30-bac9-f7163f19c609-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.085641] env[62585]: DEBUG oslo_concurrency.lockutils [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] Lock "679380d4-5b96-4c30-bac9-f7163f19c609-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.085641] env[62585]: DEBUG nova.compute.manager [req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] No waiting events found dispatching network-vif-plugged-812678f9-ea0f-4419-9b6b-98690022e9cc {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 841.085811] env[62585]: WARNING nova.compute.manager 
[req-ce0745cb-bd40-4be2-866d-2029e492304d req-9cfa9e73-97be-4e89-b782-fcb362057393 service nova] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Received unexpected event network-vif-plugged-812678f9-ea0f-4419-9b6b-98690022e9cc for instance with vm_state building and task_state spawning. [ 841.108604] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5c07270-1c9e-4308-a754-51851c5fadf0 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "e4373e2a-cc21-41b7-be28-9b140ab43247" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.475s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.222779] env[62585]: DEBUG nova.network.neutron [req-047613a0-a47f-44f9-a894-1e6e4fa7ee4c req-76264dfe-cd9e-40ac-81ee-1ef158b045bc service nova] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Updated VIF entry in instance network info cache for port 812678f9-ea0f-4419-9b6b-98690022e9cc. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 841.223186] env[62585]: DEBUG nova.network.neutron [req-047613a0-a47f-44f9-a894-1e6e4fa7ee4c req-76264dfe-cd9e-40ac-81ee-1ef158b045bc service nova] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Updating instance_info_cache with network_info: [{"id": "812678f9-ea0f-4419-9b6b-98690022e9cc", "address": "fa:16:3e:f9:7d:29", "network": {"id": "f73c6c58-29b8-4fb6-a001-94a77e4e6a53", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1579050178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f00751679b29472e9ab92c9e48a99925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap812678f9-ea", "ovs_interfaceid": "812678f9-ea0f-4419-9b6b-98690022e9cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.343830] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5205c3b3-9f9f-d9d8-1342-ffb4374fd86f, 'name': SearchDatastore_Task, 'duration_secs': 0.059373} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.344147] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.344378] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 841.344615] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.344758] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.344937] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.345443] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01b5aca8-3b87-4917-b5c8-d98985c77a66 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.358154] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.358349] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 841.359502] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f10364a-5d54-441d-be50-5801b6811b9d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.366673] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 841.366673] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52969c77-2b20-3ec0-d1ab-30fc21bc7ba2" [ 841.366673] env[62585]: _type = "Task" [ 841.366673] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.376919] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52969c77-2b20-3ec0-d1ab-30fc21bc7ba2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.389152] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.803s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.390063] env[62585]: ERROR nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9a870b5d-e28e-4aec-a108-6a7e9978d6d5, please check neutron logs for more information. 
[ 841.390063] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Traceback (most recent call last): [ 841.390063] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 841.390063] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] self.driver.spawn(context, instance, image_meta, [ 841.390063] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 841.390063] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] self._vmops.spawn(context, instance, image_meta, injected_files, [ 841.390063] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 841.390063] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] vm_ref = self.build_virtual_machine(instance, [ 841.390063] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 841.390063] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] vif_infos = vmwarevif.get_vif_info(self._session, [ 841.390063] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 841.390438] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] for vif in network_info: [ 841.390438] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 841.390438] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] return self._sync_wrapper(fn, *args, **kwargs) [ 841.390438] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 841.390438] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] self.wait() [ 841.390438] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 841.390438] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] self[:] = self._gt.wait() [ 841.390438] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 841.390438] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] return self._exit_event.wait() [ 841.390438] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 841.390438] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] current.throw(*self._exc) [ 841.390438] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
841.390438] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] result = function(*args, **kwargs) [ 841.390766] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 841.390766] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] return func(*args, **kwargs) [ 841.390766] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 841.390766] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] raise e [ 841.390766] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 841.390766] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] nwinfo = self.network_api.allocate_for_instance( [ 841.390766] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 841.390766] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] created_port_ids = self._update_ports_for_instance( [ 841.390766] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 841.390766] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] with excutils.save_and_reraise_exception(): [ 841.390766] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 841.390766] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] self.force_reraise() [ 841.390766] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 841.391139] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] raise self.value [ 841.391139] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 841.391139] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] updated_port = self._update_port( [ 841.391139] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 841.391139] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] _ensure_no_port_binding_failure(port) [ 841.391139] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 841.391139] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] raise exception.PortBindingFailed(port_id=port['id']) [ 841.391139] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] nova.exception.PortBindingFailed: Binding failed for 
port 9a870b5d-e28e-4aec-a108-6a7e9978d6d5, please check neutron logs for more information. [ 841.391139] env[62585]: ERROR nova.compute.manager [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] [ 841.391139] env[62585]: DEBUG nova.compute.utils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Binding failed for port 9a870b5d-e28e-4aec-a108-6a7e9978d6d5, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 841.392726] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.965s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.397392] env[62585]: DEBUG nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Build of instance d207fb66-ad23-47a5-a304-ecf885de4ced was re-scheduled: Binding failed for port 9a870b5d-e28e-4aec-a108-6a7e9978d6d5, please check neutron logs for more information. {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 841.398125] env[62585]: DEBUG nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 841.398333] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "refresh_cache-d207fb66-ad23-47a5-a304-ecf885de4ced" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.398585] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "refresh_cache-d207fb66-ad23-47a5-a304-ecf885de4ced" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.398847] env[62585]: DEBUG nova.network.neutron [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 841.465072] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384716, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.647779} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.465556] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] abf4a205-fcee-46e4-85b6-10a452cc0312/abf4a205-fcee-46e4-85b6-10a452cc0312.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 841.465556] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 841.469151] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-62258971-80ca-4af2-ae6e-0c6e2b37b8cb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.472725] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 841.472725] env[62585]: value = "task-1384717" [ 841.472725] env[62585]: _type = "Task" [ 841.472725] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.482317] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384717, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.611714] env[62585]: DEBUG nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 841.613117] env[62585]: DEBUG nova.compute.manager [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 841.643028] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 841.643028] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 841.643028] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 841.643309] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 841.643309] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 841.643309] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 841.643309] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 841.643309] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 841.643483] env[62585]: DEBUG nova.virt.hardware [None 
req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 841.643483] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 841.643483] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 841.643483] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-effa54bf-7bdf-4e43-af16-da02f8bd5bfa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.652315] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f575e4-2857-4b0c-b666-f3d134ed5527 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.726590] env[62585]: DEBUG oslo_concurrency.lockutils [req-047613a0-a47f-44f9-a894-1e6e4fa7ee4c req-76264dfe-cd9e-40ac-81ee-1ef158b045bc service nova] Releasing lock "refresh_cache-679380d4-5b96-4c30-bac9-f7163f19c609" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.877517] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52969c77-2b20-3ec0-d1ab-30fc21bc7ba2, 'name': SearchDatastore_Task, 'duration_secs': 0.039126} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.878227] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af8986d3-86a2-423b-bdf8-ab31450dbddf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.883624] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 841.883624] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52be7d49-a072-2c73-e92d-47ca34920d66" [ 841.883624] env[62585]: _type = "Task" [ 841.883624] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.892546] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52be7d49-a072-2c73-e92d-47ca34920d66, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.915233] env[62585]: DEBUG nova.network.neutron [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.982493] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384717, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.988416] env[62585]: DEBUG nova.network.neutron [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.071515] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a55a86-b38f-4095-86c9-0e1216bc3329 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.079448] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dddce433-ba5e-4b87-987b-c51c6f43f825 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.109309] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50721770-08fc-487d-9f3b-02c458c4f4b3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.116782] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75437fe6-f314-4dde-b40f-3ef924e35b8b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.133911] env[62585]: DEBUG nova.compute.provider_tree [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.143875] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.182377] env[62585]: DEBUG nova.compute.manager [req-626fb954-f50f-4cff-b4bc-462b2819df20 req-3947c47b-697a-4fdf-b059-76488473e118 service nova] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Received event network-vif-plugged-8360c89d-6755-4e11-b3fa-358072fa1c9b {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 842.182629] env[62585]: DEBUG oslo_concurrency.lockutils [req-626fb954-f50f-4cff-b4bc-462b2819df20 
req-3947c47b-697a-4fdf-b059-76488473e118 service nova] Acquiring lock "67e5af2f-4eec-41ec-916f-9f9b77596943-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.182801] env[62585]: DEBUG oslo_concurrency.lockutils [req-626fb954-f50f-4cff-b4bc-462b2819df20 req-3947c47b-697a-4fdf-b059-76488473e118 service nova] Lock "67e5af2f-4eec-41ec-916f-9f9b77596943-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.182965] env[62585]: DEBUG oslo_concurrency.lockutils [req-626fb954-f50f-4cff-b4bc-462b2819df20 req-3947c47b-697a-4fdf-b059-76488473e118 service nova] Lock "67e5af2f-4eec-41ec-916f-9f9b77596943-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.183263] env[62585]: DEBUG nova.compute.manager [req-626fb954-f50f-4cff-b4bc-462b2819df20 req-3947c47b-697a-4fdf-b059-76488473e118 service nova] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] No waiting events found dispatching network-vif-plugged-8360c89d-6755-4e11-b3fa-358072fa1c9b {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 842.183431] env[62585]: WARNING nova.compute.manager [req-626fb954-f50f-4cff-b4bc-462b2819df20 req-3947c47b-697a-4fdf-b059-76488473e118 service nova] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Received unexpected event network-vif-plugged-8360c89d-6755-4e11-b3fa-358072fa1c9b for instance with vm_state building and task_state spawning. [ 842.190408] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.190659] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.300231] env[62585]: DEBUG nova.network.neutron [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Successfully updated port: 8360c89d-6755-4e11-b3fa-358072fa1c9b {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 842.395158] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52be7d49-a072-2c73-e92d-47ca34920d66, 'name': SearchDatastore_Task, 'duration_secs': 0.072565} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.395379] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.395624] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 679380d4-5b96-4c30-bac9-f7163f19c609/679380d4-5b96-4c30-bac9-f7163f19c609.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 842.395864] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a451813b-d0ca-4c22-9d63-b44d891da7b8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.401588] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 842.401588] env[62585]: value = "task-1384718" [ 842.401588] env[62585]: _type = "Task" [ 842.401588] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.408692] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384718, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.483560] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384717, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.819907} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.483967] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 842.485967] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02865c6e-ffa3-4cb2-91c6-d52402ce3f1b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.498049] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "refresh_cache-d207fb66-ad23-47a5-a304-ecf885de4ced" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.498267] env[62585]: DEBUG nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 842.498445] env[62585]: DEBUG nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 842.498604] env[62585]: DEBUG nova.network.neutron [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 842.508339] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] abf4a205-fcee-46e4-85b6-10a452cc0312/abf4a205-fcee-46e4-85b6-10a452cc0312.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 842.508981] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d335cded-8ea0-419c-b218-aeb99c8ea886 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.528609] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 842.528609] env[62585]: value = "task-1384719" [ 842.528609] env[62585]: _type = "Task" [ 842.528609] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.536360] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384719, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.547254] env[62585]: DEBUG nova.network.neutron [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 842.640026] env[62585]: DEBUG nova.scheduler.client.report [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 842.806629] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "refresh_cache-67e5af2f-4eec-41ec-916f-9f9b77596943" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.806925] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquired lock "refresh_cache-67e5af2f-4eec-41ec-916f-9f9b77596943" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.806925] env[62585]: DEBUG nova.network.neutron [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 842.911289] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384718, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.038301] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384719, 'name': ReconfigVM_Task, 'duration_secs': 0.266449} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.038575] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Reconfigured VM instance instance-00000046 to attach disk [datastore2] abf4a205-fcee-46e4-85b6-10a452cc0312/abf4a205-fcee-46e4-85b6-10a452cc0312.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 843.039229] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1ee107a4-25fd-4d6f-947c-24c6498c0439 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.045234] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 843.045234] env[62585]: value = "task-1384720" [ 843.045234] env[62585]: _type = "Task" [ 843.045234] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.049639] env[62585]: DEBUG nova.network.neutron [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.056478] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384720, 'name': Rename_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.145075] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.752s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.145666] env[62585]: ERROR nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 25f099b0-6edc-4046-90dd-9447559a5a03, please check neutron logs for more information. 
[ 843.145666] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Traceback (most recent call last): [ 843.145666] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 843.145666] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] self.driver.spawn(context, instance, image_meta, [ 843.145666] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 843.145666] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 843.145666] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 843.145666] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] vm_ref = self.build_virtual_machine(instance, [ 843.145666] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 843.145666] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] vif_infos = vmwarevif.get_vif_info(self._session, [ 843.145666] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 843.146250] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] for vif in network_info: [ 843.146250] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 843.146250] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] return self._sync_wrapper(fn, *args, **kwargs) [ 843.146250] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 843.146250] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] self.wait() [ 843.146250] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 843.146250] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] self[:] = self._gt.wait() [ 843.146250] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 843.146250] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] return self._exit_event.wait() [ 843.146250] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 843.146250] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] result = hub.switch() [ 843.146250] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
843.146250] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] return self.greenlet.switch() [ 843.146829] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 843.146829] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] result = function(*args, **kwargs) [ 843.146829] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 843.146829] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] return func(*args, **kwargs) [ 843.146829] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 843.146829] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] raise e [ 843.146829] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 843.146829] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] nwinfo = self.network_api.allocate_for_instance( [ 843.146829] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 843.146829] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] created_port_ids = self._update_ports_for_instance( [ 843.146829] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 843.146829] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] with excutils.save_and_reraise_exception(): [ 843.146829] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 843.147374] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] self.force_reraise() [ 843.147374] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 843.147374] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] raise self.value [ 843.147374] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 843.147374] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] updated_port = self._update_port( [ 843.147374] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 843.147374] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] _ensure_no_port_binding_failure(port) [ 843.147374] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 843.147374] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] raise exception.PortBindingFailed(port_id=port['id']) [ 843.147374] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] nova.exception.PortBindingFailed: Binding failed for port 25f099b0-6edc-4046-90dd-9447559a5a03, please check neutron logs for more information. [ 843.147374] env[62585]: ERROR nova.compute.manager [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] [ 843.147777] env[62585]: DEBUG nova.compute.utils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Binding failed for port 25f099b0-6edc-4046-90dd-9447559a5a03, please check neutron logs for more information. {{(pid=62585) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 843.147777] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.970s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.149143] env[62585]: INFO nova.compute.claims [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 843.152196] env[62585]: DEBUG nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Build of instance e89b55d3-aa15-4d28-ba80-fe3b45ee289f was re-scheduled: Binding failed for port 25f099b0-6edc-4046-90dd-9447559a5a03, please check neutron logs for more information. 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 843.152613] env[62585]: DEBUG nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Unplugging VIFs for instance {{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 843.152832] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "refresh_cache-e89b55d3-aa15-4d28-ba80-fe3b45ee289f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.152979] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "refresh_cache-e89b55d3-aa15-4d28-ba80-fe3b45ee289f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.153150] env[62585]: DEBUG nova.network.neutron [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 843.341082] env[62585]: DEBUG nova.network.neutron [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 843.416505] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384718, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.490368] env[62585]: DEBUG nova.network.neutron [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Updating instance_info_cache with network_info: [{"id": "8360c89d-6755-4e11-b3fa-358072fa1c9b", "address": "fa:16:3e:f2:82:cb", "network": {"id": "ac24d220-e2df-46cd-8eda-ef2fd44ee270", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1994738629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10320418f69d4f0e88a3adf2a8245237", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8360c89d-67", "ovs_interfaceid": "8360c89d-6755-4e11-b3fa-358072fa1c9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.553363] env[62585]: INFO nova.compute.manager [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: d207fb66-ad23-47a5-a304-ecf885de4ced] Took 1.05 seconds to deallocate network for instance. [ 843.560619] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384720, 'name': Rename_Task, 'duration_secs': 0.12749} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.561218] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 843.561480] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84b6e9a7-8635-46d6-83a5-6fb9d3c2771e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.571100] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 843.571100] env[62585]: value = "task-1384721" [ 843.571100] env[62585]: _type = "Task" [ 843.571100] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.580092] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384721, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.673786] env[62585]: DEBUG nova.network.neutron [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 843.759578] env[62585]: DEBUG nova.network.neutron [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.914194] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384718, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.323421} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.914446] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 679380d4-5b96-4c30-bac9-f7163f19c609/679380d4-5b96-4c30-bac9-f7163f19c609.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 843.914660] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 843.914914] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4de4a33-9059-40e1-9736-1d30908422b6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.922334] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 843.922334] env[62585]: value = "task-1384722" [ 843.922334] env[62585]: _type = "Task" [ 843.922334] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.929462] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384722, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.993450] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Releasing lock "refresh_cache-67e5af2f-4eec-41ec-916f-9f9b77596943" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.993735] env[62585]: DEBUG nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Instance network_info: |[{"id": "8360c89d-6755-4e11-b3fa-358072fa1c9b", "address": "fa:16:3e:f2:82:cb", "network": {"id": "ac24d220-e2df-46cd-8eda-ef2fd44ee270", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1994738629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10320418f69d4f0e88a3adf2a8245237", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8360c89d-67", "ovs_interfaceid": "8360c89d-6755-4e11-b3fa-358072fa1c9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 843.994179] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:82:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8360c89d-6755-4e11-b3fa-358072fa1c9b', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.001830] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Creating folder: Project (10320418f69d4f0e88a3adf2a8245237). Parent ref: group-v293962. 
{{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 844.002203] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25205175-ac8a-45c6-bcee-4028d400babc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.012063] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Created folder: Project (10320418f69d4f0e88a3adf2a8245237) in parent group-v293962. [ 844.012248] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Creating folder: Instances. Parent ref: group-v294000. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 844.012475] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aec598d1-5d79-4abd-9edc-2bbfede052b4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.021809] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Created folder: Instances in parent group-v294000. [ 844.022117] env[62585]: DEBUG oslo.service.loopingcall [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 844.022369] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 844.022630] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8e0df8e-34d5-4688-85e7-1599549e1a04 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.040988] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.040988] env[62585]: value = "task-1384725" [ 844.040988] env[62585]: _type = "Task" [ 844.040988] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.048461] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384725, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.080214] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384721, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.209264] env[62585]: DEBUG nova.compute.manager [req-9af2419e-1cd1-4a3f-9424-cb8572af0c88 req-52c89136-79b7-4396-920a-6b1290b9e8ad service nova] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Received event network-changed-8360c89d-6755-4e11-b3fa-358072fa1c9b {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 844.209512] env[62585]: DEBUG nova.compute.manager [req-9af2419e-1cd1-4a3f-9424-cb8572af0c88 req-52c89136-79b7-4396-920a-6b1290b9e8ad service nova] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Refreshing instance network info cache due to event network-changed-8360c89d-6755-4e11-b3fa-358072fa1c9b. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 844.209751] env[62585]: DEBUG oslo_concurrency.lockutils [req-9af2419e-1cd1-4a3f-9424-cb8572af0c88 req-52c89136-79b7-4396-920a-6b1290b9e8ad service nova] Acquiring lock "refresh_cache-67e5af2f-4eec-41ec-916f-9f9b77596943" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.209894] env[62585]: DEBUG oslo_concurrency.lockutils [req-9af2419e-1cd1-4a3f-9424-cb8572af0c88 req-52c89136-79b7-4396-920a-6b1290b9e8ad service nova] Acquired lock "refresh_cache-67e5af2f-4eec-41ec-916f-9f9b77596943" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.210166] env[62585]: DEBUG nova.network.neutron [req-9af2419e-1cd1-4a3f-9424-cb8572af0c88 req-52c89136-79b7-4396-920a-6b1290b9e8ad service nova] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Refreshing network info cache for port 8360c89d-6755-4e11-b3fa-358072fa1c9b {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 844.263887] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "refresh_cache-e89b55d3-aa15-4d28-ba80-fe3b45ee289f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.264131] env[62585]: DEBUG nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62585) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 844.264312] env[62585]: DEBUG nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 844.264476] env[62585]: DEBUG nova.network.neutron [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 844.280562] env[62585]: DEBUG nova.network.neutron [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 844.327349] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521216ea-41c4-47da-986e-f733cf56de30 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.334969] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0a82ac-68cf-4869-a17b-97e59cc77063 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.366737] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a574f9-257f-4c5c-bd2e-d32f672ca6c8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.374174] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84389512-4f43-4c83-98b3-69f8c2c1939a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.387455] env[62585]: DEBUG nova.compute.provider_tree [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.432378] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384722, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069379} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.432642] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 844.433400] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ae0f59-404c-4120-af13-6643b8f42e67 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.455281] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 679380d4-5b96-4c30-bac9-f7163f19c609/679380d4-5b96-4c30-bac9-f7163f19c609.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 844.455570] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8a5b1f3-9669-46b3-a82b-b1fcb568547d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.475080] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 844.475080] env[62585]: value = "task-1384726" [ 844.475080] env[62585]: _type = "Task" [ 844.475080] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.483941] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384726, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.551154] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384725, 'name': CreateVM_Task} progress is 25%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.582801] env[62585]: DEBUG oslo_vmware.api [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384721, 'name': PowerOnVM_Task, 'duration_secs': 0.953796} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.583084] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 844.583366] env[62585]: INFO nova.compute.manager [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Took 9.37 seconds to spawn the instance on the hypervisor. [ 844.583586] env[62585]: DEBUG nova.compute.manager [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 844.584466] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15fb387d-617b-492e-8dd8-9c68c98daa1d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.587801] env[62585]: INFO nova.scheduler.client.report [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleted allocations for instance d207fb66-ad23-47a5-a304-ecf885de4ced [ 844.785571] env[62585]: DEBUG nova.network.neutron [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.890979] env[62585]: DEBUG nova.scheduler.client.report [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 844.977855] env[62585]: DEBUG nova.network.neutron [req-9af2419e-1cd1-4a3f-9424-cb8572af0c88 req-52c89136-79b7-4396-920a-6b1290b9e8ad service nova] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Updated VIF entry in instance network info cache for port 8360c89d-6755-4e11-b3fa-358072fa1c9b. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 844.978240] env[62585]: DEBUG nova.network.neutron [req-9af2419e-1cd1-4a3f-9424-cb8572af0c88 req-52c89136-79b7-4396-920a-6b1290b9e8ad service nova] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Updating instance_info_cache with network_info: [{"id": "8360c89d-6755-4e11-b3fa-358072fa1c9b", "address": "fa:16:3e:f2:82:cb", "network": {"id": "ac24d220-e2df-46cd-8eda-ef2fd44ee270", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1994738629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10320418f69d4f0e88a3adf2a8245237", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8360c89d-67", "ovs_interfaceid": "8360c89d-6755-4e11-b3fa-358072fa1c9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.989720] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384726, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.051876] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384725, 'name': CreateVM_Task} progress is 25%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.095490] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed7bb052-7630-42de-85aa-348d850c2b4d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "d207fb66-ad23-47a5-a304-ecf885de4ced" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.201s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.112270] env[62585]: INFO nova.compute.manager [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Took 27.87 seconds to build instance. [ 845.288449] env[62585]: INFO nova.compute.manager [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: e89b55d3-aa15-4d28-ba80-fe3b45ee289f] Took 1.02 seconds to deallocate network for instance. 
[ 845.396670] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.248s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.396670] env[62585]: DEBUG nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 845.399382] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.722s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.400805] env[62585]: INFO nova.compute.claims [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 845.483678] env[62585]: DEBUG oslo_concurrency.lockutils [req-9af2419e-1cd1-4a3f-9424-cb8572af0c88 req-52c89136-79b7-4396-920a-6b1290b9e8ad service nova] Releasing lock "refresh_cache-67e5af2f-4eec-41ec-916f-9f9b77596943" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.489408] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384726, 'name': ReconfigVM_Task, 'duration_secs': 0.558483} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.489671] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 679380d4-5b96-4c30-bac9-f7163f19c609/679380d4-5b96-4c30-bac9-f7163f19c609.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 845.490326] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac7abc00-05f2-4816-a5e3-caaebfe0a632 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.496755] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 845.496755] env[62585]: value = "task-1384727" [ 845.496755] env[62585]: _type = "Task" [ 845.496755] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.505412] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384727, 'name': Rename_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.551778] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384725, 'name': CreateVM_Task, 'duration_secs': 1.402428} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.552052] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 845.552977] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.553191] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.553521] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 845.553782] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bcbf84e-744f-4d3d-b533-1addaf845c4e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.558871] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 845.558871] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5249bd2e-a418-be62-c85c-1905460b86ae" [ 845.558871] env[62585]: _type = "Task" [ 845.558871] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.566816] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5249bd2e-a418-be62-c85c-1905460b86ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.598494] env[62585]: DEBUG nova.compute.manager [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 845.615171] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c0084a83-e101-49c2-95ca-f03ce99904fb tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "abf4a205-fcee-46e4-85b6-10a452cc0312" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.575s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.814239] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "62e3b57b-6c9c-4f3c-8a47-efb5fbed801f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.814493] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "62e3b57b-6c9c-4f3c-8a47-efb5fbed801f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.905060] env[62585]: DEBUG nova.compute.utils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 845.908906] env[62585]: DEBUG nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 845.909095] env[62585]: DEBUG nova.network.neutron [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 845.963237] env[62585]: DEBUG nova.policy [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8fd0e110bfc94784b4fe881fb27a48e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10320418f69d4f0e88a3adf2a8245237', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 846.007416] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384727, 'name': Rename_Task, 'duration_secs': 0.129177} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.007416] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 846.007416] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d67c8b95-c53c-4c80-a640-43eed1f23988 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.012975] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 846.012975] env[62585]: value = "task-1384728" [ 846.012975] env[62585]: _type = "Task" [ 846.012975] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.020732] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384728, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.068600] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5249bd2e-a418-be62-c85c-1905460b86ae, 'name': SearchDatastore_Task, 'duration_secs': 0.008977} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.068989] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.069235] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 846.069605] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.069962] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.070150] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 846.070415] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7830e811-ad5f-4d98-aade-59d98468eedd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.084563] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 846.084734] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 846.085440] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b059be00-12c6-4eef-9716-fc3a62212bbe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.090451] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 846.090451] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]528cc3d8-591e-3299-fc05-a364a411cd07" [ 846.090451] env[62585]: _type = "Task" [ 846.090451] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.098755] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]528cc3d8-591e-3299-fc05-a364a411cd07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.118534] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.247371] env[62585]: DEBUG nova.network.neutron [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Successfully created port: 08ce05f4-704c-472a-a234-8eb9ac7e4856 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 846.316747] env[62585]: DEBUG nova.compute.manager [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 846.321149] env[62585]: INFO nova.scheduler.client.report [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Deleted allocations for instance e89b55d3-aa15-4d28-ba80-fe3b45ee289f [ 846.329820] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "95de3c81-b764-4594-af86-66df7814d7aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.330240] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "95de3c81-b764-4594-af86-66df7814d7aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.412509] env[62585]: DEBUG nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 846.526814] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384728, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.588609] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d86c20-839d-41ca-92ed-a01fb767e013 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.601082] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]528cc3d8-591e-3299-fc05-a364a411cd07, 'name': SearchDatastore_Task, 'duration_secs': 0.008563} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.603578] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e4b0cb0-ba14-4710-af75-646c827f320b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.606486] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f45cf33f-a752-448b-ad78-4908ed105b76 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.616021] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 846.616021] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52793cfa-373d-5bbd-eac3-7a430fe856c3" [ 846.616021] env[62585]: _type = "Task" [ 846.616021] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.641774] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9c1ad5-cd23-4910-90a4-7267949ba3bc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.649349] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52793cfa-373d-5bbd-eac3-7a430fe856c3, 'name': SearchDatastore_Task, 'duration_secs': 0.019503} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.651390] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.651647] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 67e5af2f-4eec-41ec-916f-9f9b77596943/67e5af2f-4eec-41ec-916f-9f9b77596943.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 846.651928] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30cfe700-114a-4841-b686-259a5c3e2afd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.654583] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c7f740-2aff-4233-aa0b-b255c71555fa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.667824] env[62585]: DEBUG nova.compute.provider_tree [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.670364] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 846.670364] env[62585]: value = "task-1384729" [ 846.670364] env[62585]: _type = "Task" [ 846.670364] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.677355] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384729, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.833009] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd33cb57-b368-41fd-a11a-1cb5b0f0d342 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "e89b55d3-aa15-4d28-ba80-fe3b45ee289f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.882s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.846798] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.027359] env[62585]: DEBUG oslo_vmware.api [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384728, 'name': PowerOnVM_Task, 'duration_secs': 0.64993} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.027359] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 847.027466] env[62585]: INFO nova.compute.manager [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Took 9.51 seconds to spawn the instance on the hypervisor. 
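The PowerOnVM_Task records above follow oslo.vmware's usual invoke-then-poll pattern: a *_Task method is invoked through the session, and the repeated "_poll_task ... progress is N% ... completed successfully" lines are the poller reporting on that task. The sketch below is illustrative only; it is not taken from this log or from Nova's source, and the connection details and `vm_ref` placeholder are assumptions.

```python
# Hedged sketch of the oslo.vmware call pattern behind the
# "Waiting for the task ... progress is N% ... completed successfully"
# lines above. Host, credentials, and vm_ref are placeholders, not values
# from this log.
from oslo_vmware import api

session = api.VMwareAPISession(
    "vcenter.example.invalid",   # assumed host, not the vCenter in this log
    "username", "password",
    api_retry_count=10,
    task_poll_interval=0.5)

vm_ref = ...  # a VirtualMachine managed-object reference obtained elsewhere

# invoke_api returns a Task reference; wait_for_task polls it (the
# _poll_task DEBUG lines) until it reports success or raises on failure.
task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
session.wait_for_task(task)
```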
[ 847.027644] env[62585]: DEBUG nova.compute.manager [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 847.028555] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41742d29-6a7c-405b-b9ab-fa81f1d952fc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.172036] env[62585]: DEBUG nova.scheduler.client.report [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 847.185914] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384729, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496622} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.186200] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 67e5af2f-4eec-41ec-916f-9f9b77596943/67e5af2f-4eec-41ec-916f-9f9b77596943.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 847.186407] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 847.186654] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a84d220-4430-4fc7-a755-6261686b9ab6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.194331] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 847.194331] env[62585]: value = "task-1384730" [ 847.194331] env[62585]: _type = "Task" [ 847.194331] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.202899] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384730, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.337835] env[62585]: DEBUG nova.compute.manager [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 847.425128] env[62585]: DEBUG nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 847.448535] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 847.448817] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 847.448977] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 847.449174] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 847.449318] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 847.449464] env[62585]: DEBUG nova.virt.hardware [None 
req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 847.449665] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 847.449822] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 847.449981] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 847.450162] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 847.450332] env[62585]: DEBUG nova.virt.hardware [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 847.451245] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7f8717-aed0-46de-a6b3-ab998cdc33bf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.459286] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5be473-9da7-46cb-a362-5e1d543ff759 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.547976] env[62585]: INFO nova.compute.manager [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Took 28.58 seconds to build instance. 
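The nova.virt.hardware records above walk from the flavor/image limits (65536 sockets, cores, and threads) down to a single possible topology for the 1-vCPU m1.nano flavor ("Got 1 possible topologies ... VirtCPUTopology(cores=1,sockets=1,threads=1)"). The snippet below is a stand-alone illustration of that enumeration step under those limits, not Nova's actual implementation.

```python
# Illustrative only: enumerate (sockets, cores, threads) triples whose
# product equals the flavor's vCPU count, which is what the
# "Got 1 possible topologies" line reflects for a 1-vCPU flavor.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    topologies.append((s, c, t))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)] -> one possible topology
```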
[ 847.682018] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.281s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.682018] env[62585]: DEBUG nova.compute.manager [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 847.685453] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.523s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.688150] env[62585]: INFO nova.compute.claims [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.704200] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384730, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070013} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.704506] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 847.705719] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521126e7-acef-43d9-b00f-87ef2eb38067 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.730872] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 67e5af2f-4eec-41ec-916f-9f9b77596943/67e5af2f-4eec-41ec-916f-9f9b77596943.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 847.730872] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33f1cabd-3fe6-48f7-a0a6-2b970e5357de {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.763040] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 847.763040] env[62585]: value = "task-1384731" [ 847.763040] env[62585]: _type = "Task" [ 847.763040] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.774234] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384731, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.804096] env[62585]: DEBUG nova.compute.manager [req-ef0135f8-60c8-4105-9024-399388b5597f req-5f7ec71c-d4e2-4e46-b37f-042702880c29 service nova] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Received event network-vif-plugged-08ce05f4-704c-472a-a234-8eb9ac7e4856 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 847.804341] env[62585]: DEBUG oslo_concurrency.lockutils [req-ef0135f8-60c8-4105-9024-399388b5597f req-5f7ec71c-d4e2-4e46-b37f-042702880c29 service nova] Acquiring lock "d644c700-c5d1-4549-b73b-0573f268dc40-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.804554] env[62585]: DEBUG oslo_concurrency.lockutils [req-ef0135f8-60c8-4105-9024-399388b5597f req-5f7ec71c-d4e2-4e46-b37f-042702880c29 service nova] Lock "d644c700-c5d1-4549-b73b-0573f268dc40-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.804742] env[62585]: DEBUG oslo_concurrency.lockutils [req-ef0135f8-60c8-4105-9024-399388b5597f req-5f7ec71c-d4e2-4e46-b37f-042702880c29 service nova] Lock "d644c700-c5d1-4549-b73b-0573f268dc40-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.804919] env[62585]: DEBUG nova.compute.manager [req-ef0135f8-60c8-4105-9024-399388b5597f req-5f7ec71c-d4e2-4e46-b37f-042702880c29 service nova] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] No waiting events found dispatching network-vif-plugged-08ce05f4-704c-472a-a234-8eb9ac7e4856 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 847.805797] env[62585]: WARNING nova.compute.manager [req-ef0135f8-60c8-4105-9024-399388b5597f req-5f7ec71c-d4e2-4e46-b37f-042702880c29 service nova] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Received unexpected event network-vif-plugged-08ce05f4-704c-472a-a234-8eb9ac7e4856 for instance with vm_state building and task_state spawning. 
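The lockutils records throughout this trace (the per-instance build lock, "compute_resources", and the "<uuid>-events" lock) come from oslo.concurrency, whose lock helpers emit the "Acquiring lock ... acquired ... released" DEBUG lines referenced at lockutils.py:402/407/421. Below is a minimal, hedged sketch of the two usage patterns involved; the lock names and function bodies are illustrative, not Nova's code.

```python
# Minimal sketch (assumptions, not Nova's code) of the oslo.concurrency
# patterns that produce the "Acquiring lock ... acquired ... released" lines.
from oslo_concurrency import lockutils

# Decorator form: serialize work per instance UUID, as with the
# "_locked_do_build_and_run_instance" records above.
@lockutils.synchronized("d644c700-c5d1-4549-b73b-0573f268dc40")
def locked_build():
    pass  # build/run work happens while the per-instance lock is held

# Context-manager form: a short critical section, similar to the
# "compute_resources" lock taken by the resource tracker's instance_claim.
def claim_resources():
    with lockutils.lock("compute_resources"):
        pass  # update in-memory resource accounting here

locked_build()
claim_resources()
```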
[ 847.866356] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.922295] env[62585]: DEBUG nova.network.neutron [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Successfully updated port: 08ce05f4-704c-472a-a234-8eb9ac7e4856 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 848.050671] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5f573d6f-8db2-4729-ad14-349ef93f5c2f tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "679380d4-5b96-4c30-bac9-f7163f19c609" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.292s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.149309] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76a43cb-d6bf-462c-b257-9ab9fe474e18 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.158818] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b22548dd-26b3-4a8f-a5ce-866862f244ef tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Suspending the VM {{(pid=62585) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 848.159050] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-a9dfe316-12ea-4bd8-afd7-988281f2700d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.165610] env[62585]: DEBUG oslo_vmware.api [None req-b22548dd-26b3-4a8f-a5ce-866862f244ef tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 848.165610] env[62585]: value = "task-1384732" [ 848.165610] env[62585]: _type = "Task" [ 848.165610] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.174968] env[62585]: DEBUG oslo_vmware.api [None req-b22548dd-26b3-4a8f-a5ce-866862f244ef tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384732, 'name': SuspendVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.198022] env[62585]: DEBUG nova.compute.utils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 848.198022] env[62585]: DEBUG nova.compute.manager [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 848.198022] env[62585]: DEBUG nova.network.neutron [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 848.269626] env[62585]: DEBUG nova.policy [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '12ea5326e4bc4c7195db3aeb1b64c2b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de7e07f7b1ff417f8875ca2b8a5e85a1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 848.278383] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384731, 'name': ReconfigVM_Task, 'duration_secs': 0.275901} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.279096] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 67e5af2f-4eec-41ec-916f-9f9b77596943/67e5af2f-4eec-41ec-916f-9f9b77596943.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 848.279942] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a3e35530-f182-479c-9ba8-94d69683464c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.288501] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 848.288501] env[62585]: value = "task-1384733" [ 848.288501] env[62585]: _type = "Task" [ 848.288501] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.298642] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384733, 'name': Rename_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.425709] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "refresh_cache-d644c700-c5d1-4549-b73b-0573f268dc40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.427532] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquired lock "refresh_cache-d644c700-c5d1-4549-b73b-0573f268dc40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.427532] env[62585]: DEBUG nova.network.neutron [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 848.655454] env[62585]: DEBUG nova.network.neutron [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Successfully created port: 8655c15e-04e0-4f9c-9b74-c037b8553046 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 848.675663] env[62585]: DEBUG oslo_vmware.api [None req-b22548dd-26b3-4a8f-a5ce-866862f244ef tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384732, 'name': SuspendVM_Task} progress is 66%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.701473] env[62585]: DEBUG nova.compute.manager [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 848.797569] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384733, 'name': Rename_Task, 'duration_secs': 0.235139} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.800387] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 848.800822] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c166940b-a92f-48fc-aba3-368796bfd867 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.809919] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 848.809919] env[62585]: value = "task-1384734" [ 848.809919] env[62585]: _type = "Task" [ 848.809919] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.817962] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384734, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.841613] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.841613] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.940902] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64db2812-784b-4743-a607-79cd63acea62 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.949318] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0016895a-119a-46da-ab1d-d43f8a63e54f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.983423] env[62585]: DEBUG nova.network.neutron [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Successfully created port: 3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 848.985779] env[62585]: DEBUG nova.network.neutron [None req-028bb109-dccc-4f33-a967-e59fafc8833c 
tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 848.987940] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdca62eb-3a5b-4496-92e8-8e26db2c77f7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.995915] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14aff7c-6c07-4387-aa44-802940e9fc24 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.009128] env[62585]: DEBUG nova.compute.provider_tree [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.125873] env[62585]: DEBUG nova.network.neutron [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Updating instance_info_cache with network_info: [{"id": "08ce05f4-704c-472a-a234-8eb9ac7e4856", "address": "fa:16:3e:cd:20:b9", "network": {"id": "ac24d220-e2df-46cd-8eda-ef2fd44ee270", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1994738629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10320418f69d4f0e88a3adf2a8245237", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08ce05f4-70", "ovs_interfaceid": "08ce05f4-704c-472a-a234-8eb9ac7e4856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.177436] env[62585]: DEBUG oslo_vmware.api [None req-b22548dd-26b3-4a8f-a5ce-866862f244ef tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384732, 'name': SuspendVM_Task, 'duration_secs': 0.634183} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.177436] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b22548dd-26b3-4a8f-a5ce-866862f244ef tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Suspended the VM {{(pid=62585) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 849.177436] env[62585]: DEBUG nova.compute.manager [None req-b22548dd-26b3-4a8f-a5ce-866862f244ef tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 849.178667] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdba0cfb-c786-4a85-b35a-fe86390194f7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.317942] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384734, 'name': PowerOnVM_Task, 'duration_secs': 0.503869} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.318437] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 849.318437] env[62585]: INFO nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Took 7.71 seconds to spawn the instance on the hypervisor. [ 849.318600] env[62585]: DEBUG nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 849.319509] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c041724f-92b4-42a9-9ade-b6b34b555b82 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.345027] env[62585]: DEBUG nova.compute.manager [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 849.514386] env[62585]: DEBUG nova.scheduler.client.report [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 849.628941] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Releasing lock "refresh_cache-d644c700-c5d1-4549-b73b-0573f268dc40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.629293] env[62585]: DEBUG nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Instance network_info: |[{"id": "08ce05f4-704c-472a-a234-8eb9ac7e4856", "address": "fa:16:3e:cd:20:b9", "network": {"id": "ac24d220-e2df-46cd-8eda-ef2fd44ee270", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1994738629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10320418f69d4f0e88a3adf2a8245237", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08ce05f4-70", "ovs_interfaceid": "08ce05f4-704c-472a-a234-8eb9ac7e4856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 849.629701] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:20:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08ce05f4-704c-472a-a234-8eb9ac7e4856', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 849.637034] env[62585]: DEBUG oslo.service.loopingcall [None req-028bb109-dccc-4f33-a967-e59fafc8833c 
tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 849.637237] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 849.637464] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de85a065-6d20-45d3-a3c1-3c141c64d745 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.657792] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 849.657792] env[62585]: value = "task-1384735" [ 849.657792] env[62585]: _type = "Task" [ 849.657792] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.665283] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384735, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.710394] env[62585]: DEBUG nova.compute.manager [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 849.736453] env[62585]: DEBUG nova.virt.hardware [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 849.736707] env[62585]: DEBUG nova.virt.hardware [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 849.737035] env[62585]: DEBUG nova.virt.hardware [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 849.737293] env[62585]: DEBUG nova.virt.hardware [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Flavor pref 0:0:0 
{{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 849.737479] env[62585]: DEBUG nova.virt.hardware [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 849.737635] env[62585]: DEBUG nova.virt.hardware [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 849.737845] env[62585]: DEBUG nova.virt.hardware [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 849.738012] env[62585]: DEBUG nova.virt.hardware [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 849.738182] env[62585]: DEBUG nova.virt.hardware [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 849.738343] env[62585]: DEBUG nova.virt.hardware [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 849.738558] env[62585]: DEBUG nova.virt.hardware [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 849.739674] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdeade9-eb21-4a70-8951-2fa93d29ea2f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.746880] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3b70f4-3c2b-4ad5-8039-09935b601663 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.835908] env[62585]: DEBUG nova.compute.manager [req-e3c2860c-5def-4b3a-b5fb-8351ae833e93 req-b8f4212e-b61b-4560-995c-825d8993514b service nova] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Received event network-changed-08ce05f4-704c-472a-a234-8eb9ac7e4856 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 849.836149] env[62585]: DEBUG nova.compute.manager [req-e3c2860c-5def-4b3a-b5fb-8351ae833e93 req-b8f4212e-b61b-4560-995c-825d8993514b service nova] [instance: 
d644c700-c5d1-4549-b73b-0573f268dc40] Refreshing instance network info cache due to event network-changed-08ce05f4-704c-472a-a234-8eb9ac7e4856. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 849.836415] env[62585]: DEBUG oslo_concurrency.lockutils [req-e3c2860c-5def-4b3a-b5fb-8351ae833e93 req-b8f4212e-b61b-4560-995c-825d8993514b service nova] Acquiring lock "refresh_cache-d644c700-c5d1-4549-b73b-0573f268dc40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.836573] env[62585]: DEBUG oslo_concurrency.lockutils [req-e3c2860c-5def-4b3a-b5fb-8351ae833e93 req-b8f4212e-b61b-4560-995c-825d8993514b service nova] Acquired lock "refresh_cache-d644c700-c5d1-4549-b73b-0573f268dc40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.836835] env[62585]: DEBUG nova.network.neutron [req-e3c2860c-5def-4b3a-b5fb-8351ae833e93 req-b8f4212e-b61b-4560-995c-825d8993514b service nova] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Refreshing network info cache for port 08ce05f4-704c-472a-a234-8eb9ac7e4856 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 849.840190] env[62585]: INFO nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Took 26.94 seconds to build instance. [ 849.865531] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.019601] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.334s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.020177] env[62585]: DEBUG nova.compute.manager [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 850.023025] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.218s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.024389] env[62585]: INFO nova.compute.claims [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 850.168258] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384735, 'name': CreateVM_Task, 'duration_secs': 0.431592} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.168421] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 850.169159] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.169325] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.169647] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 850.169893] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc607013-b53f-4334-be6d-f31364d2caf4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.174214] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 850.174214] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52414e0b-e732-a171-5231-ad204e2def90" [ 850.174214] env[62585]: _type = "Task" [ 850.174214] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.181234] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52414e0b-e732-a171-5231-ad204e2def90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.343433] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "67e5af2f-4eec-41ec-916f-9f9b77596943" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.745s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.490930] env[62585]: DEBUG nova.network.neutron [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Successfully updated port: 8655c15e-04e0-4f9c-9b74-c037b8553046 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 850.528406] env[62585]: DEBUG nova.compute.utils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 850.532029] env[62585]: DEBUG nova.compute.manager [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 850.532387] env[62585]: DEBUG nova.network.neutron [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 850.575411] env[62585]: DEBUG nova.policy [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f26abf4eaa71482b8fd3c6425a9c683d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48929b5f0c2c41ddade223ab57002fc4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 850.579582] env[62585]: DEBUG nova.network.neutron [req-e3c2860c-5def-4b3a-b5fb-8351ae833e93 req-b8f4212e-b61b-4560-995c-825d8993514b service nova] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Updated VIF entry in instance network info cache for port 08ce05f4-704c-472a-a234-8eb9ac7e4856. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 850.580039] env[62585]: DEBUG nova.network.neutron [req-e3c2860c-5def-4b3a-b5fb-8351ae833e93 req-b8f4212e-b61b-4560-995c-825d8993514b service nova] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Updating instance_info_cache with network_info: [{"id": "08ce05f4-704c-472a-a234-8eb9ac7e4856", "address": "fa:16:3e:cd:20:b9", "network": {"id": "ac24d220-e2df-46cd-8eda-ef2fd44ee270", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1994738629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10320418f69d4f0e88a3adf2a8245237", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08ce05f4-70", "ovs_interfaceid": "08ce05f4-704c-472a-a234-8eb9ac7e4856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.685696] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52414e0b-e732-a171-5231-ad204e2def90, 'name': SearchDatastore_Task, 'duration_secs': 0.01501} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.686256] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.686683] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 850.687073] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.687372] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.687688] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 850.688090] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-666a53cd-3140-46d2-9781-17b03b6f5bdb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.699020] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 850.699020] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 850.699020] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-377c72fb-7803-4ff9-be11-2d8687844dba {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.703859] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 850.703859] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]525848ee-3937-fdb2-5352-a7a4aef3c26a" [ 850.703859] env[62585]: _type = "Task" [ 850.703859] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.711963] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]525848ee-3937-fdb2-5352-a7a4aef3c26a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.934918] env[62585]: DEBUG nova.network.neutron [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Successfully created port: 840822b3-e947-451f-90bf-03eafebebf95 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 851.036815] env[62585]: DEBUG nova.compute.manager [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 851.083804] env[62585]: DEBUG oslo_concurrency.lockutils [req-e3c2860c-5def-4b3a-b5fb-8351ae833e93 req-b8f4212e-b61b-4560-995c-825d8993514b service nova] Releasing lock "refresh_cache-d644c700-c5d1-4549-b73b-0573f268dc40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.223187] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]525848ee-3937-fdb2-5352-a7a4aef3c26a, 'name': SearchDatastore_Task, 'duration_secs': 0.008363} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.223930] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4042385-1d11-4fc9-a174-4d30e6113e2d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.233235] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 851.233235] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]524529ce-3888-4fbf-7489-854f1d378f5a" [ 851.233235] env[62585]: _type = "Task" [ 851.233235] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.239765] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]524529ce-3888-4fbf-7489-854f1d378f5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.256814] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c71443-9525-4e36-afd6-56fbf69c937d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.264713] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1be52ec-48b7-402a-8804-b1fbdb160b52 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.297581] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f021e9f-215b-45cc-ac22-2132b08c9a13 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.305128] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754e17d3-86a4-470a-a950-a8f50e245456 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.318014] env[62585]: DEBUG nova.compute.provider_tree [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.453199] env[62585]: DEBUG nova.compute.manager [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 851.454124] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0160502-754f-4f8b-a033-54498b81bcd1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.742076] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 
tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]524529ce-3888-4fbf-7489-854f1d378f5a, 'name': SearchDatastore_Task, 'duration_secs': 0.012488} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.742537] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.742627] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] d644c700-c5d1-4549-b73b-0573f268dc40/d644c700-c5d1-4549-b73b-0573f268dc40.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 851.742843] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9528907a-d428-4e4e-8b39-6d4712284874 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.749136] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 851.749136] env[62585]: value = "task-1384736" [ 851.749136] env[62585]: _type = "Task" [ 851.749136] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.756911] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384736, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.821559] env[62585]: DEBUG nova.scheduler.client.report [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 851.873189] env[62585]: DEBUG nova.compute.manager [req-a0b3bdc5-07e5-4017-b4f8-4956b3d6dcd2 req-9c1eb355-02af-4f34-956a-70a5982b1658 service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Received event network-vif-plugged-8655c15e-04e0-4f9c-9b74-c037b8553046 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 851.873189] env[62585]: DEBUG oslo_concurrency.lockutils [req-a0b3bdc5-07e5-4017-b4f8-4956b3d6dcd2 req-9c1eb355-02af-4f34-956a-70a5982b1658 service nova] Acquiring lock "d2c6418c-b070-4c46-824b-18638e9b569f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.873189] env[62585]: DEBUG oslo_concurrency.lockutils [req-a0b3bdc5-07e5-4017-b4f8-4956b3d6dcd2 req-9c1eb355-02af-4f34-956a-70a5982b1658 service nova] Lock "d2c6418c-b070-4c46-824b-18638e9b569f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.873189] env[62585]: DEBUG oslo_concurrency.lockutils [req-a0b3bdc5-07e5-4017-b4f8-4956b3d6dcd2 req-9c1eb355-02af-4f34-956a-70a5982b1658 service nova] Lock "d2c6418c-b070-4c46-824b-18638e9b569f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.873189] env[62585]: DEBUG nova.compute.manager [req-a0b3bdc5-07e5-4017-b4f8-4956b3d6dcd2 req-9c1eb355-02af-4f34-956a-70a5982b1658 service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] No waiting events found dispatching network-vif-plugged-8655c15e-04e0-4f9c-9b74-c037b8553046 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 851.873372] env[62585]: WARNING nova.compute.manager [req-a0b3bdc5-07e5-4017-b4f8-4956b3d6dcd2 req-9c1eb355-02af-4f34-956a-70a5982b1658 service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Received unexpected event network-vif-plugged-8655c15e-04e0-4f9c-9b74-c037b8553046 for instance with vm_state building and task_state spawning. 
[ 851.873764] env[62585]: DEBUG nova.compute.manager [req-a0b3bdc5-07e5-4017-b4f8-4956b3d6dcd2 req-9c1eb355-02af-4f34-956a-70a5982b1658 service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Received event network-changed-8655c15e-04e0-4f9c-9b74-c037b8553046 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 851.874472] env[62585]: DEBUG nova.compute.manager [req-a0b3bdc5-07e5-4017-b4f8-4956b3d6dcd2 req-9c1eb355-02af-4f34-956a-70a5982b1658 service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Refreshing instance network info cache due to event network-changed-8655c15e-04e0-4f9c-9b74-c037b8553046. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 851.874786] env[62585]: DEBUG oslo_concurrency.lockutils [req-a0b3bdc5-07e5-4017-b4f8-4956b3d6dcd2 req-9c1eb355-02af-4f34-956a-70a5982b1658 service nova] Acquiring lock "refresh_cache-d2c6418c-b070-4c46-824b-18638e9b569f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.875079] env[62585]: DEBUG oslo_concurrency.lockutils [req-a0b3bdc5-07e5-4017-b4f8-4956b3d6dcd2 req-9c1eb355-02af-4f34-956a-70a5982b1658 service nova] Acquired lock "refresh_cache-d2c6418c-b070-4c46-824b-18638e9b569f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.875375] env[62585]: DEBUG nova.network.neutron [req-a0b3bdc5-07e5-4017-b4f8-4956b3d6dcd2 req-9c1eb355-02af-4f34-956a-70a5982b1658 service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Refreshing network info cache for port 8655c15e-04e0-4f9c-9b74-c037b8553046 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 851.967678] env[62585]: INFO nova.compute.manager [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] instance snapshotting [ 851.967904] env[62585]: WARNING nova.compute.manager [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 851.971254] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55cc6fa3-34ee-4d80-a55d-b6567efea667 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.991696] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7693456d-88f2-4c3a-98e9-ef271b0db86a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.051506] env[62585]: DEBUG nova.compute.manager [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 852.076264] env[62585]: DEBUG nova.virt.hardware [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 852.076534] env[62585]: DEBUG nova.virt.hardware [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 852.076689] env[62585]: DEBUG nova.virt.hardware [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 852.076873] env[62585]: DEBUG nova.virt.hardware [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 852.077070] env[62585]: DEBUG nova.virt.hardware [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 852.077199] env[62585]: DEBUG nova.virt.hardware [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 852.077382] env[62585]: DEBUG nova.virt.hardware [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 852.077538] env[62585]: DEBUG nova.virt.hardware [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 852.077703] 
env[62585]: DEBUG nova.virt.hardware [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 852.077863] env[62585]: DEBUG nova.virt.hardware [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 852.078051] env[62585]: DEBUG nova.virt.hardware [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 852.079252] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010cef3b-7181-4d3a-a1ca-2792a912bf20 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.089253] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d48c0f-2d11-42ea-a831-ba572346e7f2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.259699] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384736, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508936} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.259984] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] d644c700-c5d1-4549-b73b-0573f268dc40/d644c700-c5d1-4549-b73b-0573f268dc40.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 852.260211] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 852.260493] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-157386cd-b987-41de-81bb-a88eb5f0981f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.269123] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 852.269123] env[62585]: value = "task-1384737" [ 852.269123] env[62585]: _type = "Task" [ 852.269123] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.275728] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384737, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.327802] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.305s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.328369] env[62585]: DEBUG nova.compute.manager [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 852.331820] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 12.674s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.424977] env[62585]: DEBUG nova.compute.manager [req-6f0a4443-89ea-4e83-b353-3ebc70655a38 req-b52018eb-3d9f-43f6-9b9c-e32d48b0e38d service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Received event network-vif-plugged-840822b3-e947-451f-90bf-03eafebebf95 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 852.425344] env[62585]: DEBUG oslo_concurrency.lockutils [req-6f0a4443-89ea-4e83-b353-3ebc70655a38 req-b52018eb-3d9f-43f6-9b9c-e32d48b0e38d service nova] Acquiring lock "a634a80e-d90a-4ce3-8233-75657a7754be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.425564] env[62585]: DEBUG oslo_concurrency.lockutils [req-6f0a4443-89ea-4e83-b353-3ebc70655a38 req-b52018eb-3d9f-43f6-9b9c-e32d48b0e38d service nova] Lock "a634a80e-d90a-4ce3-8233-75657a7754be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.425788] env[62585]: DEBUG oslo_concurrency.lockutils [req-6f0a4443-89ea-4e83-b353-3ebc70655a38 req-b52018eb-3d9f-43f6-9b9c-e32d48b0e38d service nova] Lock "a634a80e-d90a-4ce3-8233-75657a7754be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.426047] env[62585]: DEBUG nova.compute.manager [req-6f0a4443-89ea-4e83-b353-3ebc70655a38 req-b52018eb-3d9f-43f6-9b9c-e32d48b0e38d service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] No waiting events found dispatching network-vif-plugged-840822b3-e947-451f-90bf-03eafebebf95 {{(pid=62585) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 852.426250] env[62585]: WARNING nova.compute.manager [req-6f0a4443-89ea-4e83-b353-3ebc70655a38 req-b52018eb-3d9f-43f6-9b9c-e32d48b0e38d service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Received unexpected event network-vif-plugged-840822b3-e947-451f-90bf-03eafebebf95 for instance with vm_state building and task_state spawning. [ 852.453595] env[62585]: DEBUG nova.network.neutron [req-a0b3bdc5-07e5-4017-b4f8-4956b3d6dcd2 req-9c1eb355-02af-4f34-956a-70a5982b1658 service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 852.505019] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Creating Snapshot of the VM instance {{(pid=62585) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 852.505402] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-cfe36c64-7aed-415b-bd96-740795587a4a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.513719] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 852.513719] env[62585]: value = "task-1384738" [ 852.513719] env[62585]: _type = "Task" [ 852.513719] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.523505] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384738, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.559604] env[62585]: DEBUG nova.compute.manager [req-4d353d5d-ee0e-4430-9f85-d77ccc1245b2 req-d2259593-5218-4405-90d9-1a366725855e service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Received event network-vif-plugged-3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 852.560354] env[62585]: DEBUG oslo_concurrency.lockutils [req-4d353d5d-ee0e-4430-9f85-d77ccc1245b2 req-d2259593-5218-4405-90d9-1a366725855e service nova] Acquiring lock "d2c6418c-b070-4c46-824b-18638e9b569f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.560354] env[62585]: DEBUG oslo_concurrency.lockutils [req-4d353d5d-ee0e-4430-9f85-d77ccc1245b2 req-d2259593-5218-4405-90d9-1a366725855e service nova] Lock "d2c6418c-b070-4c46-824b-18638e9b569f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.560354] env[62585]: DEBUG oslo_concurrency.lockutils [req-4d353d5d-ee0e-4430-9f85-d77ccc1245b2 req-d2259593-5218-4405-90d9-1a366725855e service nova] Lock "d2c6418c-b070-4c46-824b-18638e9b569f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.560476] env[62585]: DEBUG nova.compute.manager [req-4d353d5d-ee0e-4430-9f85-d77ccc1245b2 req-d2259593-5218-4405-90d9-1a366725855e service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] No waiting events found dispatching network-vif-plugged-3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 852.560632] env[62585]: WARNING nova.compute.manager [req-4d353d5d-ee0e-4430-9f85-d77ccc1245b2 req-d2259593-5218-4405-90d9-1a366725855e service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Received unexpected event network-vif-plugged-3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a for instance with vm_state building and task_state spawning. [ 852.645790] env[62585]: DEBUG nova.network.neutron [req-a0b3bdc5-07e5-4017-b4f8-4956b3d6dcd2 req-9c1eb355-02af-4f34-956a-70a5982b1658 service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.712857] env[62585]: DEBUG nova.network.neutron [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Successfully updated port: 3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 852.776244] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384737, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.179393} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.776609] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 852.777265] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115c744e-9ca7-4b05-b7f6-92c8bf944769 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.798899] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] d644c700-c5d1-4549-b73b-0573f268dc40/d644c700-c5d1-4549-b73b-0573f268dc40.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 852.799180] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7a2700a-fcfc-4898-9972-52239e5a49b3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.818503] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 852.818503] env[62585]: value = "task-1384739" [ 852.818503] env[62585]: _type = "Task" [ 852.818503] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.831705] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384739, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.840258] env[62585]: DEBUG nova.compute.utils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 852.842163] env[62585]: DEBUG nova.compute.manager [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 852.842346] env[62585]: DEBUG nova.network.neutron [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 852.882222] env[62585]: DEBUG nova.policy [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0cbaeae4fa1e4dc996a4d8a364ea0dae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44e32d293ad64cd499926859857e023e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 852.997971] env[62585]: DEBUG nova.network.neutron [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Successfully updated port: 840822b3-e947-451f-90bf-03eafebebf95 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 853.024553] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384738, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.150931] env[62585]: DEBUG oslo_concurrency.lockutils [req-a0b3bdc5-07e5-4017-b4f8-4956b3d6dcd2 req-9c1eb355-02af-4f34-956a-70a5982b1658 service nova] Releasing lock "refresh_cache-d2c6418c-b070-4c46-824b-18638e9b569f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.204953] env[62585]: DEBUG nova.network.neutron [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Successfully created port: acdb870f-a3ba-445e-96f3-64fdd59c10a8 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 853.215648] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquiring lock "refresh_cache-d2c6418c-b070-4c46-824b-18638e9b569f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.215792] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquired lock "refresh_cache-d2c6418c-b070-4c46-824b-18638e9b569f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.217403] env[62585]: DEBUG nova.network.neutron [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 853.328552] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384739, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.346468] env[62585]: DEBUG nova.compute.manager [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 853.372365] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 8763a058-b453-4f03-9532-7d7e65efdfb2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.372553] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 6057e13b-71df-458d-b6ed-c139a8c57836 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.372697] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance abf4a205-fcee-46e4-85b6-10a452cc0312 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.372822] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 679380d4-5b96-4c30-bac9-f7163f19c609 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.372972] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 67e5af2f-4eec-41ec-916f-9f9b77596943 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.373126] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance d644c700-c5d1-4549-b73b-0573f268dc40 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.373269] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance d2c6418c-b070-4c46-824b-18638e9b569f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.373398] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance a634a80e-d90a-4ce3-8233-75657a7754be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.373537] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 70ac6289-2f14-4fb0-a811-97d76cafc532 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.501472] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.501472] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.501472] env[62585]: DEBUG nova.network.neutron [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 853.523990] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384738, 'name': CreateSnapshot_Task, 'duration_secs': 0.992096} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.524347] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Created Snapshot of the VM instance {{(pid=62585) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 853.525081] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f4356b-44fa-430c-9593-b52518c36071 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.751686] env[62585]: DEBUG nova.network.neutron [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 853.832563] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384739, 'name': ReconfigVM_Task, 'duration_secs': 0.668766} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.832874] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Reconfigured VM instance instance-00000049 to attach disk [datastore1] d644c700-c5d1-4549-b73b-0573f268dc40/d644c700-c5d1-4549-b73b-0573f268dc40.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 853.833615] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a69f2245-b8b1-4a05-9e7d-634702fdcd24 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.840908] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 853.840908] env[62585]: value = "task-1384740" [ 853.840908] env[62585]: _type = "Task" [ 853.840908] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.848523] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384740, 'name': Rename_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.877891] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance b2d2a012-a62f-4237-95c3-d7153d6b223c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 853.975522] env[62585]: DEBUG nova.compute.manager [req-81fe7d03-5ca8-47a8-b2fe-465feec7afec req-6578943f-9d20-4d51-ab8e-2e0f6521ce02 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Received event network-changed-840822b3-e947-451f-90bf-03eafebebf95 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.975818] env[62585]: DEBUG nova.compute.manager [req-81fe7d03-5ca8-47a8-b2fe-465feec7afec req-6578943f-9d20-4d51-ab8e-2e0f6521ce02 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Refreshing instance network info cache due to event network-changed-840822b3-e947-451f-90bf-03eafebebf95. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 853.975896] env[62585]: DEBUG oslo_concurrency.lockutils [req-81fe7d03-5ca8-47a8-b2fe-465feec7afec req-6578943f-9d20-4d51-ab8e-2e0f6521ce02 service nova] Acquiring lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.034160] env[62585]: DEBUG nova.network.neutron [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 854.044063] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Creating linked-clone VM from snapshot {{(pid=62585) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 854.047061] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1816c721-57cf-4cb9-894f-cd5f8df04935 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.055093] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 854.055093] env[62585]: value = "task-1384741" [ 854.055093] env[62585]: _type = "Task" [ 854.055093] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.064338] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384741, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.225593] env[62585]: DEBUG nova.network.neutron [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updating instance_info_cache with network_info: [{"id": "840822b3-e947-451f-90bf-03eafebebf95", "address": "fa:16:3e:f8:2a:80", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap840822b3-e9", "ovs_interfaceid": "840822b3-e947-451f-90bf-03eafebebf95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.272335] env[62585]: DEBUG nova.network.neutron [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Updating instance_info_cache with network_info: [{"id": "8655c15e-04e0-4f9c-9b74-c037b8553046", "address": "fa:16:3e:a0:ea:0a", "network": {"id": "d50a4dfd-37fb-4b4b-b3d9-2fc0a25fcd70", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1462317556", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de7e07f7b1ff417f8875ca2b8a5e85a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8655c15e-04", "ovs_interfaceid": "8655c15e-04e0-4f9c-9b74-c037b8553046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a", "address": "fa:16:3e:e5:20:cc", "network": {"id": "420f2281-4be2-4fd5-9a44-4e6a47881e2b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-370317843", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.44", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "de7e07f7b1ff417f8875ca2b8a5e85a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e63b09d-20", "ovs_interfaceid": "3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.353633] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384740, 'name': Rename_Task, 'duration_secs': 0.140946} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.353762] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 854.356078] env[62585]: DEBUG nova.compute.manager [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 854.356825] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea81904b-9554-4aa1-9283-0a6d07760a42 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.363207] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 854.363207] env[62585]: value = "task-1384742" [ 854.363207] env[62585]: _type = "Task" [ 854.363207] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.370781] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384742, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.380238] env[62585]: DEBUG nova.virt.hardware [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 854.380238] env[62585]: DEBUG nova.virt.hardware [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 854.380238] env[62585]: DEBUG nova.virt.hardware [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 854.380238] env[62585]: DEBUG nova.virt.hardware [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 854.380520] env[62585]: DEBUG nova.virt.hardware [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 854.380520] env[62585]: DEBUG nova.virt.hardware [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 854.380964] env[62585]: DEBUG nova.virt.hardware [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 854.380964] env[62585]: DEBUG nova.virt.hardware [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
854.381592] env[62585]: DEBUG nova.virt.hardware [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 854.381783] env[62585]: DEBUG nova.virt.hardware [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 854.381998] env[62585]: DEBUG nova.virt.hardware [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 854.382690] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance d96a04d7-b07f-439d-aafa-09dc70a4d1a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 854.387645] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea2d82d-b857-4e72-aec8-99c8d488d30e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.393492] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d982b460-3ae2-455d-ae18-11964fb3ff25 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.565187] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384741, 'name': CloneVM_Task} progress is 94%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.594745] env[62585]: DEBUG nova.compute.manager [req-da90f6d9-0173-4546-abb9-e9092db067e5 req-3ebb2475-a08c-4bca-b9a7-8aef1ab02e05 service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Received event network-changed-3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 854.594990] env[62585]: DEBUG nova.compute.manager [req-da90f6d9-0173-4546-abb9-e9092db067e5 req-3ebb2475-a08c-4bca-b9a7-8aef1ab02e05 service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Refreshing instance network info cache due to event network-changed-3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 854.595145] env[62585]: DEBUG oslo_concurrency.lockutils [req-da90f6d9-0173-4546-abb9-e9092db067e5 req-3ebb2475-a08c-4bca-b9a7-8aef1ab02e05 service nova] Acquiring lock "refresh_cache-d2c6418c-b070-4c46-824b-18638e9b569f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.722203] env[62585]: DEBUG nova.network.neutron [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Successfully updated port: acdb870f-a3ba-445e-96f3-64fdd59c10a8 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 854.730284] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.730284] env[62585]: DEBUG nova.compute.manager [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Instance network_info: |[{"id": "840822b3-e947-451f-90bf-03eafebebf95", "address": "fa:16:3e:f8:2a:80", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap840822b3-e9", "ovs_interfaceid": "840822b3-e947-451f-90bf-03eafebebf95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 854.730284] env[62585]: DEBUG oslo_concurrency.lockutils [req-81fe7d03-5ca8-47a8-b2fe-465feec7afec req-6578943f-9d20-4d51-ab8e-2e0f6521ce02 service nova] Acquired lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.730284] env[62585]: DEBUG nova.network.neutron [req-81fe7d03-5ca8-47a8-b2fe-465feec7afec req-6578943f-9d20-4d51-ab8e-2e0f6521ce02 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Refreshing network info cache for port 840822b3-e947-451f-90bf-03eafebebf95 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 854.731226] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 
tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:2a:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f4a795c-8718-4a7c-aafe-9da231df10f8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '840822b3-e947-451f-90bf-03eafebebf95', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 854.739998] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Creating folder: Project (48929b5f0c2c41ddade223ab57002fc4). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 854.741083] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8457906-471a-4402-8074-dd53e1623f65 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.752557] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Created folder: Project (48929b5f0c2c41ddade223ab57002fc4) in parent group-v293962. [ 854.752761] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Creating folder: Instances. Parent ref: group-v294006. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 854.752999] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2660b92a-2ac3-4b44-9a72-a6132844d304 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.762536] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Created folder: Instances in parent group-v294006. [ 854.762843] env[62585]: DEBUG oslo.service.loopingcall [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 854.763085] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 854.763321] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-914dbbb5-6fb6-4b14-ad27-7ae4199dcb17 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.778735] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Releasing lock "refresh_cache-d2c6418c-b070-4c46-824b-18638e9b569f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.779147] env[62585]: DEBUG nova.compute.manager [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Instance network_info: |[{"id": "8655c15e-04e0-4f9c-9b74-c037b8553046", "address": "fa:16:3e:a0:ea:0a", "network": {"id": "d50a4dfd-37fb-4b4b-b3d9-2fc0a25fcd70", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1462317556", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de7e07f7b1ff417f8875ca2b8a5e85a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8655c15e-04", "ovs_interfaceid": "8655c15e-04e0-4f9c-9b74-c037b8553046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a", "address": "fa:16:3e:e5:20:cc", "network": {"id": "420f2281-4be2-4fd5-9a44-4e6a47881e2b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-370317843", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.44", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "de7e07f7b1ff417f8875ca2b8a5e85a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e63b09d-20", "ovs_interfaceid": "3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 854.779468] env[62585]: DEBUG oslo_concurrency.lockutils [req-da90f6d9-0173-4546-abb9-e9092db067e5 req-3ebb2475-a08c-4bca-b9a7-8aef1ab02e05 service nova] Acquired lock "refresh_cache-d2c6418c-b070-4c46-824b-18638e9b569f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.779680] env[62585]: DEBUG nova.network.neutron [req-da90f6d9-0173-4546-abb9-e9092db067e5 req-3ebb2475-a08c-4bca-b9a7-8aef1ab02e05 service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Refreshing network info cache for port 3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 854.780954] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:ea:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a0d5af-5be9-477a-837c-58ef55c717f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8655c15e-04e0-4f9c-9b74-c037b8553046', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:20:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 854.790285] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Creating folder: Project (de7e07f7b1ff417f8875ca2b8a5e85a1). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 854.791770] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94df56af-fc1c-43aa-912c-122637348ef1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.796812] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 854.796812] env[62585]: value = "task-1384745" [ 854.796812] env[62585]: _type = "Task" [ 854.796812] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.801478] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Created folder: Project (de7e07f7b1ff417f8875ca2b8a5e85a1) in parent group-v293962. [ 854.801698] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Creating folder: Instances. Parent ref: group-v294008. 
{{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 854.804698] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0091e8fb-4b87-421a-84b4-39dee60b4259 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.806349] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384745, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.814509] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Created folder: Instances in parent group-v294008. [ 854.814751] env[62585]: DEBUG oslo.service.loopingcall [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 854.814943] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 854.815164] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5039f1f0-b1b6-4b98-9169-1a60afdcfe10 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.838072] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 854.838072] env[62585]: value = "task-1384748" [ 854.838072] env[62585]: _type = "Task" [ 854.838072] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.848620] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384748, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.876156] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384742, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.889134] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 855.067660] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384741, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.224741] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "refresh_cache-70ac6289-2f14-4fb0-a811-97d76cafc532" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.224943] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquired lock "refresh_cache-70ac6289-2f14-4fb0-a811-97d76cafc532" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.225114] env[62585]: DEBUG nova.network.neutron [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 855.306915] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384745, 'name': CreateVM_Task, 'duration_secs': 0.466415} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.306915] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 855.307536] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.307697] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.308014] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 855.308308] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91c8cdd7-453b-48cc-ab15-79372a6f89a9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.312558] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 855.312558] env[62585]: value = 
"session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e7b77d-45d7-7203-40b7-e81dd50bbba7" [ 855.312558] env[62585]: _type = "Task" [ 855.312558] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.319903] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e7b77d-45d7-7203-40b7-e81dd50bbba7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.347216] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384748, 'name': CreateVM_Task, 'duration_secs': 0.42494} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.351144] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 855.352040] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.378237] env[62585]: DEBUG oslo_vmware.api [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384742, 'name': PowerOnVM_Task, 'duration_secs': 0.708731} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.378498] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 855.378693] env[62585]: INFO nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Took 7.95 seconds to spawn the instance on the hypervisor. [ 855.378884] env[62585]: DEBUG nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 855.379816] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc92cde-9aa5-4494-b16b-fe77ed6db0d2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.391699] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 95de3c81-b764-4594-af86-66df7814d7aa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 855.542826] env[62585]: DEBUG nova.network.neutron [req-81fe7d03-5ca8-47a8-b2fe-465feec7afec req-6578943f-9d20-4d51-ab8e-2e0f6521ce02 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updated VIF entry in instance network info cache for port 840822b3-e947-451f-90bf-03eafebebf95. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 855.543227] env[62585]: DEBUG nova.network.neutron [req-81fe7d03-5ca8-47a8-b2fe-465feec7afec req-6578943f-9d20-4d51-ab8e-2e0f6521ce02 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updating instance_info_cache with network_info: [{"id": "840822b3-e947-451f-90bf-03eafebebf95", "address": "fa:16:3e:f8:2a:80", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap840822b3-e9", "ovs_interfaceid": "840822b3-e947-451f-90bf-03eafebebf95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.565767] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384741, 'name': CloneVM_Task, 'duration_secs': 1.190615} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.566023] env[62585]: INFO nova.virt.vmwareapi.vmops [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Created linked-clone VM from snapshot [ 855.566736] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47e43ff-1700-492d-94db-3b6095bb6851 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.573987] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Uploading image 6d23e817-7b18-4e33-b9b5-50d1b647249d {{(pid=62585) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 855.576500] env[62585]: DEBUG nova.network.neutron [req-da90f6d9-0173-4546-abb9-e9092db067e5 req-3ebb2475-a08c-4bca-b9a7-8aef1ab02e05 service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Updated VIF entry in instance network info cache for port 3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 855.576936] env[62585]: DEBUG nova.network.neutron [req-da90f6d9-0173-4546-abb9-e9092db067e5 req-3ebb2475-a08c-4bca-b9a7-8aef1ab02e05 service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Updating instance_info_cache with network_info: [{"id": "8655c15e-04e0-4f9c-9b74-c037b8553046", "address": "fa:16:3e:a0:ea:0a", "network": {"id": "d50a4dfd-37fb-4b4b-b3d9-2fc0a25fcd70", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1462317556", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de7e07f7b1ff417f8875ca2b8a5e85a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8655c15e-04", "ovs_interfaceid": "8655c15e-04e0-4f9c-9b74-c037b8553046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a", "address": "fa:16:3e:e5:20:cc", "network": {"id": "420f2281-4be2-4fd5-9a44-4e6a47881e2b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-370317843", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.44", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "de7e07f7b1ff417f8875ca2b8a5e85a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e63b09d-20", "ovs_interfaceid": "3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.594826] env[62585]: DEBUG oslo_vmware.rw_handles [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 855.594826] env[62585]: value = "vm-294005" [ 855.594826] env[62585]: _type = "VirtualMachine" [ 855.594826] env[62585]: }. {{(pid=62585) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 855.595103] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6f12eb63-aaa6-4ac7-99f0-b98360573e4c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.602897] env[62585]: DEBUG oslo_vmware.rw_handles [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lease: (returnval){ [ 855.602897] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52667cbf-3781-1d43-657a-ba6f427385d4" [ 855.602897] env[62585]: _type = "HttpNfcLease" [ 855.602897] env[62585]: } obtained for exporting VM: (result){ [ 855.602897] env[62585]: value = "vm-294005" [ 855.602897] env[62585]: _type = "VirtualMachine" [ 855.602897] env[62585]: }. {{(pid=62585) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 855.603282] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the lease: (returnval){ [ 855.603282] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52667cbf-3781-1d43-657a-ba6f427385d4" [ 855.603282] env[62585]: _type = "HttpNfcLease" [ 855.603282] env[62585]: } to be ready. {{(pid=62585) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 855.609986] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 855.609986] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52667cbf-3781-1d43-657a-ba6f427385d4" [ 855.609986] env[62585]: _type = "HttpNfcLease" [ 855.609986] env[62585]: } is initializing. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 855.757972] env[62585]: DEBUG nova.network.neutron [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 855.822780] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e7b77d-45d7-7203-40b7-e81dd50bbba7, 'name': SearchDatastore_Task, 'duration_secs': 0.012103} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.823072] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.823316] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.823573] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.823725] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.824135] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.825173] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.825458] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 855.825701] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-b63fde57-e618-47da-87bc-a0c8fe83e826 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.829967] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-559a8801-29c4-471b-b954-ea410f43d585 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.837221] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Waiting for the task: (returnval){ [ 855.837221] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522274cc-9990-1cf7-1b5c-4a2b098e8d6a" [ 855.837221] env[62585]: _type = "Task" [ 855.837221] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.841608] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.841969] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.842791] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d42ae7b-0e3f-4de7-8a64-258b523b590a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.848014] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522274cc-9990-1cf7-1b5c-4a2b098e8d6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.851083] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 855.851083] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522dc879-78c3-4787-8977-b3c926850f74" [ 855.851083] env[62585]: _type = "Task" [ 855.851083] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.858196] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522dc879-78c3-4787-8977-b3c926850f74, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.895260] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance f1bfef38-b6d0-40d0-8e60-310f8a75dd78 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 855.895489] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 855.895632] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 855.899317] env[62585]: INFO nova.compute.manager [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Took 21.75 seconds to build instance. [ 855.957609] env[62585]: DEBUG nova.network.neutron [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Updating instance_info_cache with network_info: [{"id": "acdb870f-a3ba-445e-96f3-64fdd59c10a8", "address": "fa:16:3e:1c:e9:bf", "network": {"id": "87a03b94-3a0b-4ec4-92a2-a71a086076ac", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-309846620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44e32d293ad64cd499926859857e023e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacdb870f-a3", "ovs_interfaceid": "acdb870f-a3ba-445e-96f3-64fdd59c10a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.046269] env[62585]: DEBUG oslo_concurrency.lockutils [req-81fe7d03-5ca8-47a8-b2fe-465feec7afec req-6578943f-9d20-4d51-ab8e-2e0f6521ce02 service nova] Releasing lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.067028] env[62585]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ce63b5-d544-4326-a76c-ad282643307a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.074955] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03824536-47f2-48a1-be02-d0efc8b22841 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.079487] env[62585]: DEBUG oslo_concurrency.lockutils [req-da90f6d9-0173-4546-abb9-e9092db067e5 req-3ebb2475-a08c-4bca-b9a7-8aef1ab02e05 service nova] Releasing lock "refresh_cache-d2c6418c-b070-4c46-824b-18638e9b569f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.103752] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c45a93-e41f-4260-9244-c2917ea44c06 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.114680] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0857c4-b239-482d-90c3-824bd99b4e22 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.118236] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 856.118236] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52667cbf-3781-1d43-657a-ba6f427385d4" [ 856.118236] env[62585]: _type = "HttpNfcLease" [ 856.118236] env[62585]: } is ready. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 856.118521] env[62585]: DEBUG oslo_vmware.rw_handles [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 856.118521] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52667cbf-3781-1d43-657a-ba6f427385d4" [ 856.118521] env[62585]: _type = "HttpNfcLease" [ 856.118521] env[62585]: }. {{(pid=62585) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 856.119533] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212ecb9d-6cd6-40f6-a486-17c655ab755b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.129103] env[62585]: DEBUG nova.compute.provider_tree [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.135481] env[62585]: DEBUG oslo_vmware.rw_handles [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c365af-3f56-c91a-d04e-400d11af09ba/disk-0.vmdk from lease info. 
{{(pid=62585) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 856.135481] env[62585]: DEBUG oslo_vmware.rw_handles [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c365af-3f56-c91a-d04e-400d11af09ba/disk-0.vmdk for reading. {{(pid=62585) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 856.233993] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b3682b11-51a3-464d-b12f-0f97204508f5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.349118] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522274cc-9990-1cf7-1b5c-4a2b098e8d6a, 'name': SearchDatastore_Task, 'duration_secs': 0.010658} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.349359] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.349582] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 856.349794] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.359893] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522dc879-78c3-4787-8977-b3c926850f74, 'name': SearchDatastore_Task, 'duration_secs': 0.01098} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.360583] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f618b2a-f4cf-47b6-aaef-24c53cb0b2df {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.365797] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 856.365797] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52067ff2-5aa8-ce46-18d4-8f4dd6541962" [ 856.365797] env[62585]: _type = "Task" [ 856.365797] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.375059] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52067ff2-5aa8-ce46-18d4-8f4dd6541962, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.404668] env[62585]: DEBUG oslo_concurrency.lockutils [None req-028bb109-dccc-4f33-a967-e59fafc8833c tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "d644c700-c5d1-4549-b73b-0573f268dc40" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.784s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.461921] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Releasing lock "refresh_cache-70ac6289-2f14-4fb0-a811-97d76cafc532" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.462268] env[62585]: DEBUG nova.compute.manager [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Instance network_info: |[{"id": "acdb870f-a3ba-445e-96f3-64fdd59c10a8", "address": "fa:16:3e:1c:e9:bf", "network": {"id": "87a03b94-3a0b-4ec4-92a2-a71a086076ac", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-309846620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44e32d293ad64cd499926859857e023e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacdb870f-a3", "ovs_interfaceid": "acdb870f-a3ba-445e-96f3-64fdd59c10a8", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 856.462761] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:e9:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4b6ddb2-2e19-4031-9b22-add90d41a114', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'acdb870f-a3ba-445e-96f3-64fdd59c10a8', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 856.472785] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Creating folder: Project (44e32d293ad64cd499926859857e023e). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 856.473280] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b90a1ae-7e13-4c15-b68b-aa3f4402c4af {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.483704] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Created folder: Project (44e32d293ad64cd499926859857e023e) in parent group-v293962. [ 856.484010] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Creating folder: Instances. Parent ref: group-v294012. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 856.484335] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e81b7655-6c20-4b86-9ec7-9aca69c1d6b6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.493089] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Created folder: Instances in parent group-v294012. [ 856.493356] env[62585]: DEBUG oslo.service.loopingcall [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 856.493576] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 856.493804] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e72db3b-4e0b-4f75-b995-c3f5d93097a7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.515778] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 856.515778] env[62585]: value = "task-1384752" [ 856.515778] env[62585]: _type = "Task" [ 856.515778] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.524169] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384752, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.632689] env[62585]: DEBUG nova.scheduler.client.report [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 856.711080] env[62585]: DEBUG nova.compute.manager [req-3a42e963-713c-4c10-9271-18fc2673c25f req-58f59a95-f134-410a-9d05-4ae2ccbcfc30 service nova] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Received event network-vif-plugged-acdb870f-a3ba-445e-96f3-64fdd59c10a8 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 856.711450] env[62585]: DEBUG oslo_concurrency.lockutils [req-3a42e963-713c-4c10-9271-18fc2673c25f req-58f59a95-f134-410a-9d05-4ae2ccbcfc30 service nova] Acquiring lock "70ac6289-2f14-4fb0-a811-97d76cafc532-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.711751] env[62585]: DEBUG oslo_concurrency.lockutils [req-3a42e963-713c-4c10-9271-18fc2673c25f req-58f59a95-f134-410a-9d05-4ae2ccbcfc30 service nova] Lock "70ac6289-2f14-4fb0-a811-97d76cafc532-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.711989] env[62585]: DEBUG oslo_concurrency.lockutils [req-3a42e963-713c-4c10-9271-18fc2673c25f req-58f59a95-f134-410a-9d05-4ae2ccbcfc30 service nova] Lock "70ac6289-2f14-4fb0-a811-97d76cafc532-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.712230] env[62585]: DEBUG nova.compute.manager [req-3a42e963-713c-4c10-9271-18fc2673c25f req-58f59a95-f134-410a-9d05-4ae2ccbcfc30 service nova] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] 
No waiting events found dispatching network-vif-plugged-acdb870f-a3ba-445e-96f3-64fdd59c10a8 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 856.712637] env[62585]: WARNING nova.compute.manager [req-3a42e963-713c-4c10-9271-18fc2673c25f req-58f59a95-f134-410a-9d05-4ae2ccbcfc30 service nova] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Received unexpected event network-vif-plugged-acdb870f-a3ba-445e-96f3-64fdd59c10a8 for instance with vm_state building and task_state spawning. [ 856.712831] env[62585]: DEBUG nova.compute.manager [req-3a42e963-713c-4c10-9271-18fc2673c25f req-58f59a95-f134-410a-9d05-4ae2ccbcfc30 service nova] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Received event network-changed-acdb870f-a3ba-445e-96f3-64fdd59c10a8 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 856.712997] env[62585]: DEBUG nova.compute.manager [req-3a42e963-713c-4c10-9271-18fc2673c25f req-58f59a95-f134-410a-9d05-4ae2ccbcfc30 service nova] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Refreshing instance network info cache due to event network-changed-acdb870f-a3ba-445e-96f3-64fdd59c10a8. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 856.713225] env[62585]: DEBUG oslo_concurrency.lockutils [req-3a42e963-713c-4c10-9271-18fc2673c25f req-58f59a95-f134-410a-9d05-4ae2ccbcfc30 service nova] Acquiring lock "refresh_cache-70ac6289-2f14-4fb0-a811-97d76cafc532" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.713370] env[62585]: DEBUG oslo_concurrency.lockutils [req-3a42e963-713c-4c10-9271-18fc2673c25f req-58f59a95-f134-410a-9d05-4ae2ccbcfc30 service nova] Acquired lock "refresh_cache-70ac6289-2f14-4fb0-a811-97d76cafc532" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.713558] env[62585]: DEBUG nova.network.neutron [req-3a42e963-713c-4c10-9271-18fc2673c25f req-58f59a95-f134-410a-9d05-4ae2ccbcfc30 service nova] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Refreshing network info cache for port acdb870f-a3ba-445e-96f3-64fdd59c10a8 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 856.878465] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52067ff2-5aa8-ce46-18d4-8f4dd6541962, 'name': SearchDatastore_Task, 'duration_secs': 0.010743} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.878833] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.879122] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] a634a80e-d90a-4ce3-8233-75657a7754be/a634a80e-d90a-4ce3-8233-75657a7754be.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 856.879457] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.879929] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 856.880913] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-593ab88d-eb86-44b9-abfd-1eddd0dda804 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.883213] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9e7c97a-4fa2-4393-94eb-9decca7192c0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.892185] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 856.892185] env[62585]: value = "task-1384753" [ 856.892185] env[62585]: _type = "Task" [ 856.892185] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.896865] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 856.897193] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 856.898509] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22741bf4-73f9-433f-8352-27960def6245 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.904423] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384753, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.908044] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Waiting for the task: (returnval){ [ 856.908044] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5227bc0c-e25a-0f29-1a5b-f3bc4df0df61" [ 856.908044] env[62585]: _type = "Task" [ 856.908044] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.918318] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5227bc0c-e25a-0f29-1a5b-f3bc4df0df61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.026630] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384752, 'name': CreateVM_Task, 'duration_secs': 0.289515} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.026891] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 857.027687] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.029142] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.029142] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 857.029142] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-deea1712-1803-4a0e-a144-8171afe5449d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.033594] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for the task: (returnval){ [ 857.033594] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5241f3bc-a3b0-b19d-7db7-6e52a2e6959c" [ 857.033594] env[62585]: _type = "Task" [ 857.033594] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.042387] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5241f3bc-a3b0-b19d-7db7-6e52a2e6959c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.138598] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62585) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 857.139225] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.807s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.139365] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.996s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.141318] env[62585]: INFO nova.compute.claims [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 857.269891] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "67e5af2f-4eec-41ec-916f-9f9b77596943" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.270454] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "67e5af2f-4eec-41ec-916f-9f9b77596943" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.270755] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "67e5af2f-4eec-41ec-916f-9f9b77596943-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.271039] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "67e5af2f-4eec-41ec-916f-9f9b77596943-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.271383] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock 
"67e5af2f-4eec-41ec-916f-9f9b77596943-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.273819] env[62585]: INFO nova.compute.manager [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Terminating instance [ 857.275976] env[62585]: DEBUG nova.compute.manager [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 857.276301] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 857.277274] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139ff40d-fbf3-4203-a9b5-f20441903ffa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.286466] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 857.286952] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cab3ae60-64ba-42b1-9f50-50f0d93c6237 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.293980] env[62585]: DEBUG oslo_vmware.api [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 857.293980] env[62585]: value = "task-1384754" [ 857.293980] env[62585]: _type = "Task" [ 857.293980] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.302728] env[62585]: DEBUG oslo_vmware.api [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384754, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.315268] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 857.343508] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "d644c700-c5d1-4549-b73b-0573f268dc40" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.343844] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "d644c700-c5d1-4549-b73b-0573f268dc40" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.344126] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "d644c700-c5d1-4549-b73b-0573f268dc40-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.344427] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "d644c700-c5d1-4549-b73b-0573f268dc40-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.344612] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "d644c700-c5d1-4549-b73b-0573f268dc40-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.347203] env[62585]: INFO nova.compute.manager [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Terminating instance [ 857.349381] env[62585]: DEBUG nova.compute.manager [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 857.349582] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 857.350694] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae2d083-12d4-4df1-8f31-4969888b179c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.359928] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 857.360325] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2058756b-184f-4693-89ee-66ed63e3f932 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.367623] env[62585]: DEBUG oslo_vmware.api [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 857.367623] env[62585]: value = "task-1384755" [ 857.367623] env[62585]: _type = "Task" [ 857.367623] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.376748] env[62585]: DEBUG oslo_vmware.api [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384755, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.402437] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384753, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.419705] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5227bc0c-e25a-0f29-1a5b-f3bc4df0df61, 'name': SearchDatastore_Task, 'duration_secs': 0.010515} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.423558] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1150f829-f77c-47d4-9741-31417471b041 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.430182] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Waiting for the task: (returnval){ [ 857.430182] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b95890-650d-af4e-24a8-aef6756513e7" [ 857.430182] env[62585]: _type = "Task" [ 857.430182] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.440311] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b95890-650d-af4e-24a8-aef6756513e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.465039] env[62585]: DEBUG nova.network.neutron [req-3a42e963-713c-4c10-9271-18fc2673c25f req-58f59a95-f134-410a-9d05-4ae2ccbcfc30 service nova] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Updated VIF entry in instance network info cache for port acdb870f-a3ba-445e-96f3-64fdd59c10a8. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 857.465495] env[62585]: DEBUG nova.network.neutron [req-3a42e963-713c-4c10-9271-18fc2673c25f req-58f59a95-f134-410a-9d05-4ae2ccbcfc30 service nova] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Updating instance_info_cache with network_info: [{"id": "acdb870f-a3ba-445e-96f3-64fdd59c10a8", "address": "fa:16:3e:1c:e9:bf", "network": {"id": "87a03b94-3a0b-4ec4-92a2-a71a086076ac", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-309846620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44e32d293ad64cd499926859857e023e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacdb870f-a3", "ovs_interfaceid": "acdb870f-a3ba-445e-96f3-64fdd59c10a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.545514] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5241f3bc-a3b0-b19d-7db7-6e52a2e6959c, 'name': 
SearchDatastore_Task, 'duration_secs': 0.010614} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.545900] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.546170] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 857.546414] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.803918] env[62585]: DEBUG oslo_vmware.api [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384754, 'name': PowerOffVM_Task, 'duration_secs': 0.19327} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.804307] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 857.804517] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 857.804836] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ebec265-e97f-45f2-8cfd-1c33169a0a42 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.878165] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 857.879531] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
857.879531] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Deleting the datastore file [datastore2] 67e5af2f-4eec-41ec-916f-9f9b77596943 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 857.881652] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8de1b2ff-2010-4ad1-a1cb-ceea97c3e466 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.883584] env[62585]: DEBUG oslo_vmware.api [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384755, 'name': PowerOffVM_Task, 'duration_secs': 0.259344} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.883833] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 857.884013] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 857.884674] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d20350d-f5d6-4cf5-8443-b11a52ff2635 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.889936] env[62585]: DEBUG oslo_vmware.api [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 857.889936] env[62585]: value = "task-1384757" [ 857.889936] env[62585]: _type = "Task" [ 857.889936] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.899844] env[62585]: DEBUG oslo_vmware.api [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384757, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.912183] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384753, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.562706} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.912452] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] a634a80e-d90a-4ce3-8233-75657a7754be/a634a80e-d90a-4ce3-8233-75657a7754be.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 857.912670] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 857.912940] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b704dc16-e92c-4097-b58f-63c9977bb597 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.919531] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 857.919531] env[62585]: value = "task-1384759" [ 857.919531] env[62585]: _type = "Task" [ 857.919531] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.928560] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384759, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.943385] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b95890-650d-af4e-24a8-aef6756513e7, 'name': SearchDatastore_Task, 'duration_secs': 0.01656} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.943583] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.943775] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] d2c6418c-b070-4c46-824b-18638e9b569f/d2c6418c-b070-4c46-824b-18638e9b569f.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 857.944058] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.944275] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 857.944512] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dfb78f30-d813-453a-a40d-b959efd47897 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.946839] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b22acc0-dee5-414e-9cab-6755c197fcb8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.951076] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 857.951345] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 857.951547] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Deleting the datastore file [datastore1] d644c700-c5d1-4549-b73b-0573f268dc40 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 857.951834] env[62585]: DEBUG oslo_vmware.service 
[-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-476ece16-d5f8-48ac-9017-b8b137a59c73 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.954880] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Waiting for the task: (returnval){ [ 857.954880] env[62585]: value = "task-1384760" [ 857.954880] env[62585]: _type = "Task" [ 857.954880] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.961084] env[62585]: DEBUG oslo_vmware.api [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for the task: (returnval){ [ 857.961084] env[62585]: value = "task-1384761" [ 857.961084] env[62585]: _type = "Task" [ 857.961084] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.961390] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 857.961607] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 857.962849] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e29d1717-98d8-4680-b8ce-657eee01d99b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.971741] env[62585]: DEBUG oslo_concurrency.lockutils [req-3a42e963-713c-4c10-9271-18fc2673c25f req-58f59a95-f134-410a-9d05-4ae2ccbcfc30 service nova] Releasing lock "refresh_cache-70ac6289-2f14-4fb0-a811-97d76cafc532" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.972166] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': task-1384760, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.975286] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for the task: (returnval){ [ 857.975286] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]524a13e5-bb06-aa65-6b30-cafb3ece0122" [ 857.975286] env[62585]: _type = "Task" [ 857.975286] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.978320] env[62585]: DEBUG oslo_vmware.api [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384761, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.985903] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]524a13e5-bb06-aa65-6b30-cafb3ece0122, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.359554] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d91e038-d872-4899-a8f5-d219a90978b3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.367595] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ee2fc8-7010-4b4a-a211-660d7f82f6bf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.403681] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054a24a6-d7c7-4c7c-bddb-63ae263ef6c8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.415464] env[62585]: DEBUG oslo_vmware.api [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384757, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245305} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.415784] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 858.416059] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 858.416230] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 858.416406] env[62585]: INFO nova.compute.manager [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 858.416641] env[62585]: DEBUG oslo.service.loopingcall [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 858.417884] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161d5181-6dc3-41e8-b5c7-0bee34fdb395 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.422090] env[62585]: DEBUG nova.compute.manager [-] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 858.422234] env[62585]: DEBUG nova.network.neutron [-] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 858.442683] env[62585]: DEBUG nova.compute.provider_tree [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.449510] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384759, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067866} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.450455] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 858.451264] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e00ef91-1365-48f4-97b2-6c54181bdf2e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.480225] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] a634a80e-d90a-4ce3-8233-75657a7754be/a634a80e-d90a-4ce3-8233-75657a7754be.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 858.484764] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1abdadb2-af20-4035-87e9-b6496a11dfd9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.504736] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': task-1384760, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508818} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.505753] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] d2c6418c-b070-4c46-824b-18638e9b569f/d2c6418c-b070-4c46-824b-18638e9b569f.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 858.506061] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 858.506659] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5dea9dda-94a1-471d-b2ed-143f727296ff {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.517103] env[62585]: DEBUG oslo_vmware.api [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Task: {'id': task-1384761, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218024} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.517384] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]524a13e5-bb06-aa65-6b30-cafb3ece0122, 'name': SearchDatastore_Task, 'duration_secs': 0.020861} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.517655] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 858.517655] env[62585]: value = "task-1384762" [ 858.517655] env[62585]: _type = "Task" [ 858.517655] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.518989] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 858.519287] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 858.519499] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 858.519680] env[62585]: INFO nova.compute.manager [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Took 1.17 seconds to destroy the instance on the hypervisor. [ 858.519953] env[62585]: DEBUG oslo.service.loopingcall [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 858.520822] env[62585]: DEBUG nova.compute.manager [-] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 858.520915] env[62585]: DEBUG nova.network.neutron [-] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 858.522741] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a53fd222-f606-4e1f-a022-bc396cb8eaa2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.533779] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for the task: (returnval){ [ 858.533779] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]520b03e4-8591-9967-45ac-13f987b45cff" [ 858.533779] env[62585]: _type = "Task" [ 858.533779] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.538390] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.538737] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Waiting for the task: (returnval){ [ 858.538737] env[62585]: value = "task-1384763" [ 858.538737] env[62585]: _type = "Task" [ 858.538737] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.552585] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]520b03e4-8591-9967-45ac-13f987b45cff, 'name': SearchDatastore_Task, 'duration_secs': 0.012316} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.555719] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.556068] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 70ac6289-2f14-4fb0-a811-97d76cafc532/70ac6289-2f14-4fb0-a811-97d76cafc532.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 858.556364] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': task-1384763, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.556606] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c976cd1-ba28-41be-9841-85a9628a98e2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.563989] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for the task: (returnval){ [ 858.563989] env[62585]: value = "task-1384764" [ 858.563989] env[62585]: _type = "Task" [ 858.563989] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.575146] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384764, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.763820] env[62585]: DEBUG nova.compute.manager [req-9141fe5b-d304-41b3-866c-2069bc7289e3 req-1a1dc20f-d443-4dfe-9f41-533bdb8b8964 service nova] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Received event network-vif-deleted-8360c89d-6755-4e11-b3fa-358072fa1c9b {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 858.766700] env[62585]: INFO nova.compute.manager [req-9141fe5b-d304-41b3-866c-2069bc7289e3 req-1a1dc20f-d443-4dfe-9f41-533bdb8b8964 service nova] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Neutron deleted interface 8360c89d-6755-4e11-b3fa-358072fa1c9b; detaching it from the instance and deleting it from the info cache [ 858.766700] env[62585]: DEBUG nova.network.neutron [req-9141fe5b-d304-41b3-866c-2069bc7289e3 req-1a1dc20f-d443-4dfe-9f41-533bdb8b8964 service nova] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.951053] env[62585]: DEBUG nova.scheduler.client.report [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 858.956455] env[62585]: DEBUG nova.compute.manager [req-d36bcb5e-e646-4d48-be26-7f4a3edfe633 req-f644985b-878d-4135-9b7f-91ec1aada5c8 service nova] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Received event network-vif-deleted-08ce05f4-704c-472a-a234-8eb9ac7e4856 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 858.956668] env[62585]: INFO nova.compute.manager [req-d36bcb5e-e646-4d48-be26-7f4a3edfe633 req-f644985b-878d-4135-9b7f-91ec1aada5c8 service nova] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Neutron deleted interface 08ce05f4-704c-472a-a234-8eb9ac7e4856; detaching it from the instance and deleting it from the info cache [ 858.956843] env[62585]: DEBUG nova.network.neutron [req-d36bcb5e-e646-4d48-be26-7f4a3edfe633 req-f644985b-878d-4135-9b7f-91ec1aada5c8 service nova] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.032248] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384762, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.052385] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': task-1384763, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075284} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.053212] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 859.053593] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8156b0d-c6c9-41e2-8357-6f39ddc8ad55 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.083420] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] d2c6418c-b070-4c46-824b-18638e9b569f/d2c6418c-b070-4c46-824b-18638e9b569f.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 859.087214] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05bea76b-908a-4d61-8a71-2c30e5c768a3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.109272] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384764, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509963} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.109582] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 70ac6289-2f14-4fb0-a811-97d76cafc532/70ac6289-2f14-4fb0-a811-97d76cafc532.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 859.109851] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 859.110228] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-88762f8c-ecf9-4bdc-b81f-e8f904339e94 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.113709] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Waiting for the task: (returnval){ [ 859.113709] env[62585]: value = "task-1384765" [ 859.113709] env[62585]: _type = "Task" [ 859.113709] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.123810] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': task-1384765, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.124144] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for the task: (returnval){ [ 859.124144] env[62585]: value = "task-1384766" [ 859.124144] env[62585]: _type = "Task" [ 859.124144] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.133344] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384766, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.233711] env[62585]: DEBUG nova.network.neutron [-] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.267998] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a957f2af-9ac0-447e-9708-7784df6b8e8a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.277959] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9053110-1bae-457b-b036-12501b609c7f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.306303] env[62585]: DEBUG nova.compute.manager [req-9141fe5b-d304-41b3-866c-2069bc7289e3 req-1a1dc20f-d443-4dfe-9f41-533bdb8b8964 service nova] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Detach interface failed, port_id=8360c89d-6755-4e11-b3fa-358072fa1c9b, reason: Instance 67e5af2f-4eec-41ec-916f-9f9b77596943 could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 859.373707] env[62585]: DEBUG nova.network.neutron [-] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.460267] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.320s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.460562] env[62585]: DEBUG nova.compute.manager [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 859.463967] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.346s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.465532] env[62585]: INFO nova.compute.claims [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.468330] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b886094a-65bf-461e-9626-0f527433e8e7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.477646] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90e61f3-f295-4ae4-a15c-a9207f42818c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.505378] env[62585]: DEBUG nova.compute.manager [req-d36bcb5e-e646-4d48-be26-7f4a3edfe633 req-f644985b-878d-4135-9b7f-91ec1aada5c8 service nova] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Detach interface failed, port_id=08ce05f4-704c-472a-a234-8eb9ac7e4856, reason: Instance d644c700-c5d1-4549-b73b-0573f268dc40 could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 859.530433] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384762, 'name': ReconfigVM_Task, 'duration_secs': 0.598998} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.530776] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Reconfigured VM instance instance-0000004b to attach disk [datastore1] a634a80e-d90a-4ce3-8233-75657a7754be/a634a80e-d90a-4ce3-8233-75657a7754be.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 859.531478] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a22883a4-f19d-487d-841e-e3828893b552 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.537636] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 859.537636] env[62585]: value = "task-1384767" [ 859.537636] env[62585]: _type = "Task" [ 859.537636] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.546513] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384767, 'name': Rename_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.623516] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': task-1384765, 'name': ReconfigVM_Task, 'duration_secs': 0.324123} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.623867] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Reconfigured VM instance instance-0000004a to attach disk [datastore1] d2c6418c-b070-4c46-824b-18638e9b569f/d2c6418c-b070-4c46-824b-18638e9b569f.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 859.624639] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7842520e-a1e8-4811-a178-84950dca8756 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.634699] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384766, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07635} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.635872] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 859.636273] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Waiting for the task: (returnval){ [ 859.636273] env[62585]: value = "task-1384768" [ 859.636273] env[62585]: _type = "Task" [ 859.636273] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.636991] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40939216-4710-4409-9fd8-6f7016d59374 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.664959] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 70ac6289-2f14-4fb0-a811-97d76cafc532/70ac6289-2f14-4fb0-a811-97d76cafc532.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 859.668881] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44e14a62-8ef5-479b-a39a-108d2b28ecf0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.683502] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': task-1384768, 'name': Rename_Task} progress is 10%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.689177] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for the task: (returnval){ [ 859.689177] env[62585]: value = "task-1384769" [ 859.689177] env[62585]: _type = "Task" [ 859.689177] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.698457] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384769, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.737228] env[62585]: INFO nova.compute.manager [-] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Took 1.31 seconds to deallocate network for instance. [ 859.876334] env[62585]: INFO nova.compute.manager [-] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Took 1.36 seconds to deallocate network for instance. [ 859.966551] env[62585]: DEBUG nova.compute.utils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 859.967148] env[62585]: DEBUG nova.compute.manager [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 859.968046] env[62585]: DEBUG nova.network.neutron [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 860.047845] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384767, 'name': Rename_Task, 'duration_secs': 0.192105} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.048151] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 860.048428] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f751d4b-1189-4260-9784-75774e10b503 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.053282] env[62585]: DEBUG nova.policy [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4428eefffda84fd18792c2bd26c4c861', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '36c76fd292d84bbe97c7221e75831fbb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 860.055988] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 860.055988] env[62585]: value = "task-1384770" [ 860.055988] env[62585]: _type = "Task" [ 860.055988] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.063704] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384770, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.151458] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': task-1384768, 'name': Rename_Task, 'duration_secs': 0.142442} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.151743] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 860.152014] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9aa96751-8614-45a2-969d-1cd9d7a6799e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.158630] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Waiting for the task: (returnval){ [ 860.158630] env[62585]: value = "task-1384771" [ 860.158630] env[62585]: _type = "Task" [ 860.158630] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.166883] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': task-1384771, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.199893] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384769, 'name': ReconfigVM_Task, 'duration_secs': 0.282744} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.200485] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 70ac6289-2f14-4fb0-a811-97d76cafc532/70ac6289-2f14-4fb0-a811-97d76cafc532.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 860.201727] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a46177cf-d9d5-47d3-8666-bbba38a70968 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.208751] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for the task: (returnval){ [ 860.208751] env[62585]: value = "task-1384772" [ 860.208751] env[62585]: _type = "Task" [ 860.208751] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.215784] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384772, 'name': Rename_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.244243] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.380380] env[62585]: DEBUG nova.network.neutron [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Successfully created port: 825c7c78-f998-4431-87b7-55f49c79830f {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 860.383134] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.470662] env[62585]: DEBUG nova.compute.manager [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 860.566501] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384770, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.671908] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': task-1384771, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.701799] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b16c8af-add3-42e1-ac78-4d9e4951d0e4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.716149] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6735c4d-010e-4a32-a833-53af1bbd1a8e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.723543] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384772, 'name': Rename_Task, 'duration_secs': 0.149662} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.749965] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 860.750587] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df6ceb41-6443-4f97-bb38-d6cf8b3f1556 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.752801] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1cf071-39f9-4342-acb9-6b715d14d990 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.762044] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b382cce-8eac-467c-9452-e23c90f3ca45 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.765754] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for the task: (returnval){ [ 860.765754] env[62585]: value = "task-1384773" [ 860.765754] env[62585]: _type = "Task" [ 860.765754] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.776670] env[62585]: DEBUG nova.compute.provider_tree [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.782791] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384773, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.069213] env[62585]: DEBUG oslo_vmware.api [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384770, 'name': PowerOnVM_Task, 'duration_secs': 0.545727} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.069695] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 861.070066] env[62585]: INFO nova.compute.manager [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Took 9.02 seconds to spawn the instance on the hypervisor. [ 861.070380] env[62585]: DEBUG nova.compute.manager [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 861.071624] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6cc51d4-efbd-4d57-81c8-d8ba322a338e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.169472] env[62585]: DEBUG oslo_vmware.api [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': task-1384771, 'name': PowerOnVM_Task, 'duration_secs': 0.517224} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.169908] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 861.170131] env[62585]: INFO nova.compute.manager [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Took 11.46 seconds to spawn the instance on the hypervisor. [ 861.170131] env[62585]: DEBUG nova.compute.manager [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 861.170955] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a0094a-a06f-4ab3-8db0-aa5d4e6e6c71 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.277040] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384773, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.280703] env[62585]: DEBUG nova.scheduler.client.report [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 861.481496] env[62585]: DEBUG nova.compute.manager [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 861.509547] env[62585]: DEBUG nova.virt.hardware [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 861.509802] env[62585]: DEBUG nova.virt.hardware [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 861.509990] env[62585]: DEBUG nova.virt.hardware [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 861.510227] env[62585]: DEBUG nova.virt.hardware [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 861.510403] env[62585]: DEBUG nova.virt.hardware [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 861.511330] env[62585]: DEBUG 
nova.virt.hardware [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 861.511330] env[62585]: DEBUG nova.virt.hardware [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 861.511330] env[62585]: DEBUG nova.virt.hardware [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 861.511660] env[62585]: DEBUG nova.virt.hardware [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 861.511764] env[62585]: DEBUG nova.virt.hardware [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 861.511951] env[62585]: DEBUG nova.virt.hardware [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 861.512970] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a41d2e-299c-41f1-a4cf-cb6244066216 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.522072] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b23045-7457-4fa8-98a2-233e77a504d3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.593130] env[62585]: INFO nova.compute.manager [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Took 24.45 seconds to build instance. [ 861.692424] env[62585]: INFO nova.compute.manager [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Took 26.04 seconds to build instance. [ 861.776337] env[62585]: DEBUG oslo_vmware.api [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384773, 'name': PowerOnVM_Task, 'duration_secs': 0.651315} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.776659] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 861.776869] env[62585]: INFO nova.compute.manager [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Took 7.42 seconds to spawn the instance on the hypervisor. [ 861.777059] env[62585]: DEBUG nova.compute.manager [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 861.777825] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4865f2-fc6e-45f4-841e-c28d1955cbc3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.790764] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.791273] env[62585]: DEBUG nova.compute.manager [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 861.793938] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.947s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.799100] env[62585]: INFO nova.compute.claims [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 862.096957] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8342859b-b1d8-4d79-9093-b15b9cba1c58 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "a634a80e-d90a-4ce3-8233-75657a7754be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.762s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.194960] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aa4f5a6b-00f5-435e-9711-67eccdd95793 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "d2c6418c-b070-4c46-824b-18638e9b569f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.370s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.295173] env[62585]: INFO nova.compute.manager [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Took 23.50 seconds to build instance. [ 862.306646] env[62585]: DEBUG nova.compute.utils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 862.310351] env[62585]: DEBUG nova.compute.manager [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 862.310527] env[62585]: DEBUG nova.network.neutron [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 862.472431] env[62585]: DEBUG nova.policy [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9a2336e0b124f03ad700405bcad8f32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19b8936eaf754cbcbd1b099846a3146d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 862.552244] env[62585]: DEBUG nova.compute.manager [req-3454ecaa-59b5-4aab-ab1d-369db825f107 req-5ca4ce8d-31e6-46db-a18d-73ab7b2c8d1f service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Received event network-vif-plugged-825c7c78-f998-4431-87b7-55f49c79830f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 862.552244] env[62585]: DEBUG oslo_concurrency.lockutils [req-3454ecaa-59b5-4aab-ab1d-369db825f107 req-5ca4ce8d-31e6-46db-a18d-73ab7b2c8d1f service nova] Acquiring lock "b2d2a012-a62f-4237-95c3-d7153d6b223c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.552244] env[62585]: DEBUG oslo_concurrency.lockutils [req-3454ecaa-59b5-4aab-ab1d-369db825f107 req-5ca4ce8d-31e6-46db-a18d-73ab7b2c8d1f service nova] Lock "b2d2a012-a62f-4237-95c3-d7153d6b223c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.552244] env[62585]: DEBUG oslo_concurrency.lockutils [req-3454ecaa-59b5-4aab-ab1d-369db825f107 req-5ca4ce8d-31e6-46db-a18d-73ab7b2c8d1f service nova] Lock "b2d2a012-a62f-4237-95c3-d7153d6b223c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.553459] env[62585]: DEBUG nova.compute.manager [req-3454ecaa-59b5-4aab-ab1d-369db825f107 req-5ca4ce8d-31e6-46db-a18d-73ab7b2c8d1f service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] No waiting events found dispatching network-vif-plugged-825c7c78-f998-4431-87b7-55f49c79830f {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 862.553459] env[62585]: WARNING nova.compute.manager [req-3454ecaa-59b5-4aab-ab1d-369db825f107 req-5ca4ce8d-31e6-46db-a18d-73ab7b2c8d1f service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Received unexpected event network-vif-plugged-825c7c78-f998-4431-87b7-55f49c79830f for instance with vm_state building and task_state spawning. 
[ 862.558173] env[62585]: DEBUG nova.network.neutron [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Successfully updated port: 825c7c78-f998-4431-87b7-55f49c79830f {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 862.797131] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b71c719-eec5-47ed-8097-b8253568b0ae tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "70ac6289-2f14-4fb0-a811-97d76cafc532" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.095s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.813522] env[62585]: DEBUG nova.compute.manager [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 863.034970] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c17076-ebfc-4914-b2ec-33d3e9dcf38d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.043761] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d09564-c4dc-49a8-8298-5835df978bf6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.075808] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "refresh_cache-b2d2a012-a62f-4237-95c3-d7153d6b223c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.076046] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquired lock "refresh_cache-b2d2a012-a62f-4237-95c3-d7153d6b223c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.076247] env[62585]: DEBUG nova.network.neutron [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 863.079727] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5ddd03-193c-4be2-afe8-8541c9e5acfb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.091019] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e50fc52-8985-49a0-a4c5-9ae950025c98 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.102693] env[62585]: DEBUG nova.compute.provider_tree [None 
req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.105266] env[62585]: DEBUG nova.network.neutron [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Successfully created port: c92c2d1e-3117-42a1-a5d2-3de9eba6e107 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 863.171276] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquiring lock "d2c6418c-b070-4c46-824b-18638e9b569f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.171593] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "d2c6418c-b070-4c46-824b-18638e9b569f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.171809] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquiring lock "d2c6418c-b070-4c46-824b-18638e9b569f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.173090] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "d2c6418c-b070-4c46-824b-18638e9b569f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.173090] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "d2c6418c-b070-4c46-824b-18638e9b569f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.174461] env[62585]: INFO nova.compute.manager [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Terminating instance [ 863.176468] env[62585]: DEBUG nova.compute.manager [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 863.178150] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 863.178815] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297b70ee-1096-42aa-ad2a-3d7288f8c3e7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.186101] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 863.186394] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53df5d87-ad1f-4a15-aec8-ecbcf1ba9caa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.194080] env[62585]: DEBUG oslo_vmware.api [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Waiting for the task: (returnval){ [ 863.194080] env[62585]: value = "task-1384774" [ 863.194080] env[62585]: _type = "Task" [ 863.194080] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.207021] env[62585]: DEBUG oslo_vmware.api [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': task-1384774, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.372183] env[62585]: DEBUG nova.compute.manager [req-62ab5758-bd9c-4815-b3d1-b65e41197aef req-4fabe145-99b8-4fae-9e01-0e35e604f57c service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Received event network-changed-840822b3-e947-451f-90bf-03eafebebf95 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 863.372183] env[62585]: DEBUG nova.compute.manager [req-62ab5758-bd9c-4815-b3d1-b65e41197aef req-4fabe145-99b8-4fae-9e01-0e35e604f57c service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Refreshing instance network info cache due to event network-changed-840822b3-e947-451f-90bf-03eafebebf95. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 863.372183] env[62585]: DEBUG oslo_concurrency.lockutils [req-62ab5758-bd9c-4815-b3d1-b65e41197aef req-4fabe145-99b8-4fae-9e01-0e35e604f57c service nova] Acquiring lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.372406] env[62585]: DEBUG oslo_concurrency.lockutils [req-62ab5758-bd9c-4815-b3d1-b65e41197aef req-4fabe145-99b8-4fae-9e01-0e35e604f57c service nova] Acquired lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.372406] env[62585]: DEBUG nova.network.neutron [req-62ab5758-bd9c-4815-b3d1-b65e41197aef req-4fabe145-99b8-4fae-9e01-0e35e604f57c service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Refreshing network info cache for port 840822b3-e947-451f-90bf-03eafebebf95 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 863.608603] env[62585]: DEBUG nova.scheduler.client.report [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 863.628296] env[62585]: DEBUG nova.network.neutron [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 863.704746] env[62585]: DEBUG oslo_vmware.api [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': task-1384774, 'name': PowerOffVM_Task, 'duration_secs': 0.234098} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.705058] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 863.705238] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 863.705720] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-759fd4cd-0172-4f97-884a-96f00726caca {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.817379] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 863.817379] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 863.817569] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Deleting the datastore file [datastore1] d2c6418c-b070-4c46-824b-18638e9b569f {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 863.817811] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd8be195-933e-443f-a64d-40afe3978995 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.825297] env[62585]: DEBUG oslo_vmware.api [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Waiting for the task: (returnval){ [ 863.825297] env[62585]: value = "task-1384776" [ 863.825297] env[62585]: _type = "Task" [ 863.825297] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.829912] env[62585]: DEBUG nova.compute.manager [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 863.839024] env[62585]: DEBUG oslo_vmware.api [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': task-1384776, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.861900] env[62585]: DEBUG nova.virt.hardware [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 863.861900] env[62585]: DEBUG nova.virt.hardware [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 863.861900] env[62585]: DEBUG nova.virt.hardware [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 863.862131] env[62585]: DEBUG nova.virt.hardware [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 863.862225] env[62585]: DEBUG nova.virt.hardware [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 863.862329] env[62585]: DEBUG nova.virt.hardware [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 863.862543] env[62585]: DEBUG nova.virt.hardware [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 863.862704] env[62585]: DEBUG nova.virt.hardware [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 863.862900] env[62585]: 
DEBUG nova.virt.hardware [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 863.863158] env[62585]: DEBUG nova.virt.hardware [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 863.863355] env[62585]: DEBUG nova.virt.hardware [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 863.864231] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb2f38c-a291-4cb0-94b0-c7332a66e952 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.869086] env[62585]: DEBUG nova.network.neutron [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Updating instance_info_cache with network_info: [{"id": "825c7c78-f998-4431-87b7-55f49c79830f", "address": "fa:16:3e:a6:7e:37", "network": {"id": "8d1518c7-e8f5-4297-9bb4-b9b4a16a1481", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454559249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c76fd292d84bbe97c7221e75831fbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825c7c78-f9", "ovs_interfaceid": "825c7c78-f998-4431-87b7-55f49c79830f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.883956] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6705480-3f67-4264-bcf0-79e40fbf038b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.117073] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.321s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
864.117073] env[62585]: DEBUG nova.compute.manager [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 864.121117] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.255s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.122939] env[62585]: INFO nova.compute.claims [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 864.206963] env[62585]: DEBUG nova.network.neutron [req-62ab5758-bd9c-4815-b3d1-b65e41197aef req-4fabe145-99b8-4fae-9e01-0e35e604f57c service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updated VIF entry in instance network info cache for port 840822b3-e947-451f-90bf-03eafebebf95. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 864.207432] env[62585]: DEBUG nova.network.neutron [req-62ab5758-bd9c-4815-b3d1-b65e41197aef req-4fabe145-99b8-4fae-9e01-0e35e604f57c service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updating instance_info_cache with network_info: [{"id": "840822b3-e947-451f-90bf-03eafebebf95", "address": "fa:16:3e:f8:2a:80", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap840822b3-e9", "ovs_interfaceid": "840822b3-e947-451f-90bf-03eafebebf95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.335259] env[62585]: DEBUG oslo_vmware.api [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Task: {'id': task-1384776, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.30902} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.335719] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 864.335889] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 864.336275] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 864.336475] env[62585]: INFO nova.compute.manager [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Took 1.16 seconds to destroy the instance on the hypervisor. [ 864.336729] env[62585]: DEBUG oslo.service.loopingcall [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 864.336957] env[62585]: DEBUG nova.compute.manager [-] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 864.337066] env[62585]: DEBUG nova.network.neutron [-] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 864.374902] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Releasing lock "refresh_cache-b2d2a012-a62f-4237-95c3-d7153d6b223c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.375213] env[62585]: DEBUG nova.compute.manager [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Instance network_info: |[{"id": "825c7c78-f998-4431-87b7-55f49c79830f", "address": "fa:16:3e:a6:7e:37", "network": {"id": "8d1518c7-e8f5-4297-9bb4-b9b4a16a1481", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454559249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"36c76fd292d84bbe97c7221e75831fbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825c7c78-f9", "ovs_interfaceid": "825c7c78-f998-4431-87b7-55f49c79830f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 864.375691] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:7e:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '825c7c78-f998-4431-87b7-55f49c79830f', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 864.383301] env[62585]: DEBUG oslo.service.loopingcall [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 864.384705] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 864.387681] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-353c2c55-92a4-407a-a367-32a4fbf0d430 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.402513] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "ddb1103d-a846-4229-b441-de45424b4ec9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.402766] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "ddb1103d-a846-4229-b441-de45424b4ec9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.409469] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 864.409469] env[62585]: value = "task-1384777" [ 864.409469] env[62585]: _type = "Task" [ 864.409469] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.417434] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384777, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.575098] env[62585]: DEBUG oslo_vmware.rw_handles [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c365af-3f56-c91a-d04e-400d11af09ba/disk-0.vmdk. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 864.576493] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4804175e-16d6-43fe-876c-6965de9c16a5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.584601] env[62585]: DEBUG nova.compute.manager [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Received event network-changed-825c7c78-f998-4431-87b7-55f49c79830f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 864.585241] env[62585]: DEBUG nova.compute.manager [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Refreshing instance network info cache due to event network-changed-825c7c78-f998-4431-87b7-55f49c79830f. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 864.585241] env[62585]: DEBUG oslo_concurrency.lockutils [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] Acquiring lock "refresh_cache-b2d2a012-a62f-4237-95c3-d7153d6b223c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.585425] env[62585]: DEBUG oslo_concurrency.lockutils [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] Acquired lock "refresh_cache-b2d2a012-a62f-4237-95c3-d7153d6b223c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.585640] env[62585]: DEBUG nova.network.neutron [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Refreshing network info cache for port 825c7c78-f998-4431-87b7-55f49c79830f {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 864.589417] env[62585]: DEBUG oslo_vmware.rw_handles [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c365af-3f56-c91a-d04e-400d11af09ba/disk-0.vmdk is in state: ready. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 864.589654] env[62585]: ERROR oslo_vmware.rw_handles [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c365af-3f56-c91a-d04e-400d11af09ba/disk-0.vmdk due to incomplete transfer. 
[ 864.589919] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b5ece73a-2f3c-4562-bb45-686f5ecb49b4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.598716] env[62585]: DEBUG oslo_vmware.rw_handles [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c365af-3f56-c91a-d04e-400d11af09ba/disk-0.vmdk. {{(pid=62585) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 864.598933] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Uploaded image 6d23e817-7b18-4e33-b9b5-50d1b647249d to the Glance image server {{(pid=62585) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 864.601451] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Destroying the VM {{(pid=62585) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 864.601687] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4071def3-301c-4c12-a94c-0d62dc92af8b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.608338] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 864.608338] env[62585]: value = "task-1384778" [ 864.608338] env[62585]: _type = "Task" [ 864.608338] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.616873] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384778, 'name': Destroy_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.627881] env[62585]: DEBUG nova.compute.utils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 864.629323] env[62585]: DEBUG nova.compute.manager [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 864.629490] env[62585]: DEBUG nova.network.neutron [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 864.710366] env[62585]: DEBUG oslo_concurrency.lockutils [req-62ab5758-bd9c-4815-b3d1-b65e41197aef req-4fabe145-99b8-4fae-9e01-0e35e604f57c service nova] Releasing lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.737680] env[62585]: DEBUG nova.policy [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b596d143eaf450e97e982b0d4ff1b50', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34c6f21d288e47dd94ccbe12526fe4e8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 864.864859] env[62585]: DEBUG nova.network.neutron [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Successfully updated port: c92c2d1e-3117-42a1-a5d2-3de9eba6e107 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 864.905591] env[62585]: DEBUG nova.compute.manager [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 864.924221] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384777, 'name': CreateVM_Task} progress is 25%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.097995] env[62585]: DEBUG nova.network.neutron [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Successfully created port: 3ce4bc74-6a0f-47ca-a5cc-a709b648910d {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 865.117522] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384778, 'name': Destroy_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.138625] env[62585]: DEBUG nova.compute.manager [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 865.322605] env[62585]: DEBUG nova.network.neutron [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Updated VIF entry in instance network info cache for port 825c7c78-f998-4431-87b7-55f49c79830f. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 865.323057] env[62585]: DEBUG nova.network.neutron [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Updating instance_info_cache with network_info: [{"id": "825c7c78-f998-4431-87b7-55f49c79830f", "address": "fa:16:3e:a6:7e:37", "network": {"id": "8d1518c7-e8f5-4297-9bb4-b9b4a16a1481", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454559249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c76fd292d84bbe97c7221e75831fbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825c7c78-f9", "ovs_interfaceid": "825c7c78-f998-4431-87b7-55f49c79830f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.370670] env[62585]: DEBUG nova.network.neutron [-] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.372058] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "refresh_cache-d96a04d7-b07f-439d-aafa-09dc70a4d1a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.372210] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock "refresh_cache-d96a04d7-b07f-439d-aafa-09dc70a4d1a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.372349] env[62585]: DEBUG nova.network.neutron [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.400990] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310a996b-e602-4803-a90a-d6e3a73d168c {{(pid=62585) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.405497] env[62585]: DEBUG nova.compute.manager [req-9e2b93c0-4c55-4847-bfee-f2cfb8b4ba06 req-1986b022-38b3-44b1-98ed-16e0ef3d6048 service nova] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Received event network-vif-plugged-c92c2d1e-3117-42a1-a5d2-3de9eba6e107 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 865.405709] env[62585]: DEBUG oslo_concurrency.lockutils [req-9e2b93c0-4c55-4847-bfee-f2cfb8b4ba06 req-1986b022-38b3-44b1-98ed-16e0ef3d6048 service nova] Acquiring lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.405918] env[62585]: DEBUG oslo_concurrency.lockutils [req-9e2b93c0-4c55-4847-bfee-f2cfb8b4ba06 req-1986b022-38b3-44b1-98ed-16e0ef3d6048 service nova] Lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.406099] env[62585]: DEBUG oslo_concurrency.lockutils [req-9e2b93c0-4c55-4847-bfee-f2cfb8b4ba06 req-1986b022-38b3-44b1-98ed-16e0ef3d6048 service nova] Lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.406270] env[62585]: DEBUG nova.compute.manager [req-9e2b93c0-4c55-4847-bfee-f2cfb8b4ba06 req-1986b022-38b3-44b1-98ed-16e0ef3d6048 service nova] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] No waiting events found dispatching network-vif-plugged-c92c2d1e-3117-42a1-a5d2-3de9eba6e107 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 865.406434] env[62585]: WARNING nova.compute.manager [req-9e2b93c0-4c55-4847-bfee-f2cfb8b4ba06 req-1986b022-38b3-44b1-98ed-16e0ef3d6048 service nova] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Received unexpected event network-vif-plugged-c92c2d1e-3117-42a1-a5d2-3de9eba6e107 for instance with vm_state building and task_state spawning. [ 865.406592] env[62585]: DEBUG nova.compute.manager [req-9e2b93c0-4c55-4847-bfee-f2cfb8b4ba06 req-1986b022-38b3-44b1-98ed-16e0ef3d6048 service nova] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Received event network-changed-c92c2d1e-3117-42a1-a5d2-3de9eba6e107 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 865.406746] env[62585]: DEBUG nova.compute.manager [req-9e2b93c0-4c55-4847-bfee-f2cfb8b4ba06 req-1986b022-38b3-44b1-98ed-16e0ef3d6048 service nova] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Refreshing instance network info cache due to event network-changed-c92c2d1e-3117-42a1-a5d2-3de9eba6e107. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 865.406970] env[62585]: DEBUG oslo_concurrency.lockutils [req-9e2b93c0-4c55-4847-bfee-f2cfb8b4ba06 req-1986b022-38b3-44b1-98ed-16e0ef3d6048 service nova] Acquiring lock "refresh_cache-d96a04d7-b07f-439d-aafa-09dc70a4d1a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.417463] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c078c2-ba1d-4209-939c-845ad808f6b2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.429907] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384777, 'name': CreateVM_Task, 'duration_secs': 0.928213} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.454120] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 865.455672] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.456351] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.456518] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.456841] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 865.457610] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37db5759-995e-4932-b18d-bbd634544d18 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.460051] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b710f2b1-408c-4a8c-a5dc-587fbcb4773a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.467856] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226df179-a3d1-447d-ade2-53b490d739b7 {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.471683] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 865.471683] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5241b606-31d1-0117-1744-dc6e6a49cef6" [ 865.471683] env[62585]: _type = "Task" [ 865.471683] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.482922] env[62585]: DEBUG nova.compute.provider_tree [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 865.489133] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5241b606-31d1-0117-1744-dc6e6a49cef6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.618206] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384778, 'name': Destroy_Task} progress is 33%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.827202] env[62585]: DEBUG oslo_concurrency.lockutils [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] Releasing lock "refresh_cache-b2d2a012-a62f-4237-95c3-d7153d6b223c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.827519] env[62585]: DEBUG nova.compute.manager [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Received event network-changed-acdb870f-a3ba-445e-96f3-64fdd59c10a8 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 865.827707] env[62585]: DEBUG nova.compute.manager [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Refreshing instance network info cache due to event network-changed-acdb870f-a3ba-445e-96f3-64fdd59c10a8. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 865.827930] env[62585]: DEBUG oslo_concurrency.lockutils [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] Acquiring lock "refresh_cache-70ac6289-2f14-4fb0-a811-97d76cafc532" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.828098] env[62585]: DEBUG oslo_concurrency.lockutils [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] Acquired lock "refresh_cache-70ac6289-2f14-4fb0-a811-97d76cafc532" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.828303] env[62585]: DEBUG nova.network.neutron [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Refreshing network info cache for port acdb870f-a3ba-445e-96f3-64fdd59c10a8 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 865.874687] env[62585]: INFO nova.compute.manager [-] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Took 1.54 seconds to deallocate network for instance. [ 865.923881] env[62585]: DEBUG nova.network.neutron [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.986550] env[62585]: DEBUG nova.scheduler.client.report [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 865.989740] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5241b606-31d1-0117-1744-dc6e6a49cef6, 'name': SearchDatastore_Task, 'duration_secs': 0.02725} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.990513] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.990562] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 865.992754] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.992754] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.992754] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 865.992754] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e5909b87-e303-4372-953c-dd12066f46bb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.999676] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 865.999839] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 866.001147] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6485c0a8-9f47-40bd-ab0e-caae918adaf5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.007440] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 866.007440] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5236fdee-d7e5-e2a0-97bc-6bb3144d7382" [ 866.007440] env[62585]: _type = "Task" [ 866.007440] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.015176] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5236fdee-d7e5-e2a0-97bc-6bb3144d7382, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.040300] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "679380d4-5b96-4c30-bac9-f7163f19c609" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.040709] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "679380d4-5b96-4c30-bac9-f7163f19c609" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.041166] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "679380d4-5b96-4c30-bac9-f7163f19c609-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.041166] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "679380d4-5b96-4c30-bac9-f7163f19c609-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.041166] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "679380d4-5b96-4c30-bac9-f7163f19c609-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.043669] env[62585]: INFO nova.compute.manager [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Terminating instance [ 866.047129] env[62585]: DEBUG nova.compute.manager [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 866.047129] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 866.047129] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da203b4-b668-4946-ba6e-67f1cda51671 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.055906] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 866.055906] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d345ec2-b195-47b5-8173-608eabc78269 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.119840] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384778, 'name': Destroy_Task} progress is 100%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.120918] env[62585]: DEBUG nova.network.neutron [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating instance_info_cache with network_info: [{"id": "c92c2d1e-3117-42a1-a5d2-3de9eba6e107", "address": "fa:16:3e:35:20:3f", "network": {"id": "8c3bc3f6-1bf0-436b-b7d4-cf0757610bb8", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972774874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19b8936eaf754cbcbd1b099846a3146d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc92c2d1e-31", "ovs_interfaceid": "c92c2d1e-3117-42a1-a5d2-3de9eba6e107", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.129455] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 866.129683] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 866.129877] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Deleting the datastore file [datastore2] 679380d4-5b96-4c30-bac9-f7163f19c609 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 866.130156] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0de4da8c-70a7-4be4-9fac-02e7ea562591 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.136871] env[62585]: DEBUG oslo_vmware.api [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 866.136871] env[62585]: value = "task-1384780" [ 866.136871] env[62585]: _type = "Task" [ 866.136871] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.145870] env[62585]: DEBUG oslo_vmware.api [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384780, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.146860] env[62585]: DEBUG nova.compute.manager [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 866.168636] env[62585]: DEBUG nova.virt.hardware [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 866.168939] env[62585]: DEBUG nova.virt.hardware [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 866.169323] env[62585]: DEBUG nova.virt.hardware [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 866.169648] env[62585]: DEBUG nova.virt.hardware [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 866.169879] env[62585]: DEBUG nova.virt.hardware [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 866.170230] env[62585]: DEBUG nova.virt.hardware [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 866.170529] env[62585]: DEBUG 
nova.virt.hardware [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 866.170728] env[62585]: DEBUG nova.virt.hardware [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 866.170923] env[62585]: DEBUG nova.virt.hardware [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 866.171129] env[62585]: DEBUG nova.virt.hardware [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 866.171364] env[62585]: DEBUG nova.virt.hardware [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 866.172211] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f139b4ef-c6dd-45ad-834c-cd73244f303f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.180818] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f966d85-165a-4c24-9482-345501712413 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.383410] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.490960] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.370s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.491546] env[62585]: DEBUG nova.compute.manager [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 866.494157] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.629s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.495539] env[62585]: INFO nova.compute.claims [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 866.518976] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5236fdee-d7e5-e2a0-97bc-6bb3144d7382, 'name': SearchDatastore_Task, 'duration_secs': 0.014197} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.523360] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69b6324e-dd9f-4295-84d3-a60c94166d91 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.526360] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "891e5a42-3681-47eb-ac88-015fa21a6580" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.526592] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "891e5a42-3681-47eb-ac88-015fa21a6580" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.531126] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 866.531126] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5264defa-b878-2062-9d83-53c581738945" [ 866.531126] env[62585]: _type = "Task" [ 866.531126] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.546043] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5264defa-b878-2062-9d83-53c581738945, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.578666] env[62585]: DEBUG nova.network.neutron [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Updated VIF entry in instance network info cache for port acdb870f-a3ba-445e-96f3-64fdd59c10a8. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 866.579088] env[62585]: DEBUG nova.network.neutron [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Updating instance_info_cache with network_info: [{"id": "acdb870f-a3ba-445e-96f3-64fdd59c10a8", "address": "fa:16:3e:1c:e9:bf", "network": {"id": "87a03b94-3a0b-4ec4-92a2-a71a086076ac", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-309846620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44e32d293ad64cd499926859857e023e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacdb870f-a3", "ovs_interfaceid": "acdb870f-a3ba-445e-96f3-64fdd59c10a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.611849] env[62585]: DEBUG nova.compute.manager [req-8d2a28e7-51ef-4b99-8b86-4afe1be980b3 req-6dd596ee-ebc9-427c-ac5e-2bed7f3cc9ee service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Received event network-vif-deleted-8655c15e-04e0-4f9c-9b74-c037b8553046 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.612041] env[62585]: DEBUG nova.compute.manager [req-8d2a28e7-51ef-4b99-8b86-4afe1be980b3 req-6dd596ee-ebc9-427c-ac5e-2bed7f3cc9ee service nova] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Received event network-vif-deleted-3e63b09d-2011-40ad-b8e0-37ee3a4a5a6a {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.612218] env[62585]: DEBUG nova.compute.manager [req-8d2a28e7-51ef-4b99-8b86-4afe1be980b3 req-6dd596ee-ebc9-427c-ac5e-2bed7f3cc9ee service nova] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Received event network-vif-plugged-3ce4bc74-6a0f-47ca-a5cc-a709b648910d {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.612443] env[62585]: DEBUG oslo_concurrency.lockutils [req-8d2a28e7-51ef-4b99-8b86-4afe1be980b3 req-6dd596ee-ebc9-427c-ac5e-2bed7f3cc9ee service nova] Acquiring lock "62e3b57b-6c9c-4f3c-8a47-efb5fbed801f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
866.612655] env[62585]: DEBUG oslo_concurrency.lockutils [req-8d2a28e7-51ef-4b99-8b86-4afe1be980b3 req-6dd596ee-ebc9-427c-ac5e-2bed7f3cc9ee service nova] Lock "62e3b57b-6c9c-4f3c-8a47-efb5fbed801f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.612823] env[62585]: DEBUG oslo_concurrency.lockutils [req-8d2a28e7-51ef-4b99-8b86-4afe1be980b3 req-6dd596ee-ebc9-427c-ac5e-2bed7f3cc9ee service nova] Lock "62e3b57b-6c9c-4f3c-8a47-efb5fbed801f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.612986] env[62585]: DEBUG nova.compute.manager [req-8d2a28e7-51ef-4b99-8b86-4afe1be980b3 req-6dd596ee-ebc9-427c-ac5e-2bed7f3cc9ee service nova] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] No waiting events found dispatching network-vif-plugged-3ce4bc74-6a0f-47ca-a5cc-a709b648910d {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 866.613171] env[62585]: WARNING nova.compute.manager [req-8d2a28e7-51ef-4b99-8b86-4afe1be980b3 req-6dd596ee-ebc9-427c-ac5e-2bed7f3cc9ee service nova] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Received unexpected event network-vif-plugged-3ce4bc74-6a0f-47ca-a5cc-a709b648910d for instance with vm_state building and task_state spawning. [ 866.622893] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384778, 'name': Destroy_Task} progress is 100%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.626669] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "refresh_cache-d96a04d7-b07f-439d-aafa-09dc70a4d1a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.627091] env[62585]: DEBUG nova.compute.manager [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Instance network_info: |[{"id": "c92c2d1e-3117-42a1-a5d2-3de9eba6e107", "address": "fa:16:3e:35:20:3f", "network": {"id": "8c3bc3f6-1bf0-436b-b7d4-cf0757610bb8", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972774874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19b8936eaf754cbcbd1b099846a3146d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc92c2d1e-31", "ovs_interfaceid": "c92c2d1e-3117-42a1-a5d2-3de9eba6e107", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 866.627255] env[62585]: DEBUG oslo_concurrency.lockutils [req-9e2b93c0-4c55-4847-bfee-f2cfb8b4ba06 req-1986b022-38b3-44b1-98ed-16e0ef3d6048 service nova] Acquired lock "refresh_cache-d96a04d7-b07f-439d-aafa-09dc70a4d1a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.627436] env[62585]: DEBUG nova.network.neutron [req-9e2b93c0-4c55-4847-bfee-f2cfb8b4ba06 req-1986b022-38b3-44b1-98ed-16e0ef3d6048 service nova] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Refreshing network info cache for port c92c2d1e-3117-42a1-a5d2-3de9eba6e107 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 866.628499] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:20:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c92c2d1e-3117-42a1-a5d2-3de9eba6e107', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.635797] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 
tempest-ServerDiskConfigTestJSON-792514113-project-member] Creating folder: Project (19b8936eaf754cbcbd1b099846a3146d). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 866.638696] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af05bac1-b113-4f72-8e71-9be2389cc36a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.644068] env[62585]: DEBUG nova.network.neutron [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Successfully updated port: 3ce4bc74-6a0f-47ca-a5cc-a709b648910d {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 866.651679] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] In vmwareapi:vmops:_destroy_instance, exception while deleting the VM contents from the disk: oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore2] 679380d4-5b96-4c30-bac9-f7163f19c609 [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Traceback (most recent call last): [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1113, in _destroy_instance [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] ds_util.file_delete(self._session, [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] File "/opt/stack/nova/nova/virt/vmwareapi/ds_util.py", line 219, in file_delete [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] session._wait_for_task(file_delete_task) [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] return self.wait_for_task(task_ref) [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] return evt.wait() [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] result = hub.switch() [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] return self.greenlet.switch() [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 
679380d4-5b96-4c30-bac9-f7163f19c609] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] self.f(*self.args, **self.kw) [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] raise exceptions.translate_fault(task_info.error) [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore2] 679380d4-5b96-4c30-bac9-f7163f19c609 [ 866.651679] env[62585]: ERROR nova.virt.vmwareapi.vmops [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] [ 866.652583] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 866.652583] env[62585]: INFO nova.compute.manager [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Took 0.61 seconds to destroy the instance on the hypervisor. [ 866.652583] env[62585]: DEBUG oslo.service.loopingcall [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 866.652583] env[62585]: DEBUG nova.compute.manager [-] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 866.652753] env[62585]: DEBUG nova.network.neutron [-] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 866.655821] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Created folder: Project (19b8936eaf754cbcbd1b099846a3146d) in parent group-v293962. [ 866.656012] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Creating folder: Instances. Parent ref: group-v294016. 
{{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 866.656699] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83b8ecd7-9179-42bc-8343-d1402949599d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.666490] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Created folder: Instances in parent group-v294016. [ 866.666714] env[62585]: DEBUG oslo.service.loopingcall [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 866.666893] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 866.667103] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cea25748-5c18-43c3-a99f-34b3a2b910b3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.686775] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.686775] env[62585]: value = "task-1384783" [ 866.686775] env[62585]: _type = "Task" [ 866.686775] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.695483] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384783, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.870108] env[62585]: DEBUG nova.network.neutron [req-9e2b93c0-4c55-4847-bfee-f2cfb8b4ba06 req-1986b022-38b3-44b1-98ed-16e0ef3d6048 service nova] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updated VIF entry in instance network info cache for port c92c2d1e-3117-42a1-a5d2-3de9eba6e107. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 866.870491] env[62585]: DEBUG nova.network.neutron [req-9e2b93c0-4c55-4847-bfee-f2cfb8b4ba06 req-1986b022-38b3-44b1-98ed-16e0ef3d6048 service nova] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating instance_info_cache with network_info: [{"id": "c92c2d1e-3117-42a1-a5d2-3de9eba6e107", "address": "fa:16:3e:35:20:3f", "network": {"id": "8c3bc3f6-1bf0-436b-b7d4-cf0757610bb8", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972774874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19b8936eaf754cbcbd1b099846a3146d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc92c2d1e-31", "ovs_interfaceid": "c92c2d1e-3117-42a1-a5d2-3de9eba6e107", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.001031] env[62585]: DEBUG nova.compute.utils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 867.008135] env[62585]: DEBUG nova.compute.manager [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 867.008485] env[62585]: DEBUG nova.network.neutron [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 867.029193] env[62585]: DEBUG nova.compute.manager [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 867.042314] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5264defa-b878-2062-9d83-53c581738945, 'name': SearchDatastore_Task, 'duration_secs': 0.024923} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.042602] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.042855] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] b2d2a012-a62f-4237-95c3-d7153d6b223c/b2d2a012-a62f-4237-95c3-d7153d6b223c.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 867.043134] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-24ec8662-1b5d-4382-aabf-02295faa1770 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.050850] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 867.050850] env[62585]: value = "task-1384784" [ 867.050850] env[62585]: _type = "Task" [ 867.050850] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.059919] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384784, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.074552] env[62585]: DEBUG nova.policy [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac7d82c678d64fba8373930238d5bb2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8121e0a00494834a580b940d36e0160', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 867.082085] env[62585]: DEBUG oslo_concurrency.lockutils [req-2ef0302d-81d8-415c-9de9-4c70188bbb13 req-765025e4-e5d5-4f34-99ae-91fd91f80780 service nova] Releasing lock "refresh_cache-70ac6289-2f14-4fb0-a811-97d76cafc532" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.127905] env[62585]: DEBUG oslo_vmware.api [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384778, 'name': Destroy_Task, 'duration_secs': 2.273337} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.128262] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Destroyed the VM [ 867.128669] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Deleting Snapshot of the VM instance {{(pid=62585) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 867.129021] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-73b3516f-f1ef-4a3b-83c7-244b37a4e2c4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.134218] env[62585]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 867.134371] env[62585]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=62585) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 867.134757] env[62585]: DEBUG nova.compute.utils [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Cleaning up image 6d23e817-7b18-4e33-b9b5-50d1b647249d {{(pid=62585) delete_image /opt/stack/nova/nova/compute/utils.py:1322}} [ 867.148397] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "refresh_cache-62e3b57b-6c9c-4f3c-8a47-efb5fbed801f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.148397] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquired lock "refresh_cache-62e3b57b-6c9c-4f3c-8a47-efb5fbed801f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.148397] env[62585]: DEBUG nova.network.neutron [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 867.197983] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384783, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.363935] env[62585]: DEBUG nova.network.neutron [-] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.374082] env[62585]: DEBUG oslo_concurrency.lockutils [req-9e2b93c0-4c55-4847-bfee-f2cfb8b4ba06 req-1986b022-38b3-44b1-98ed-16e0ef3d6048 service nova] Releasing lock "refresh_cache-d96a04d7-b07f-439d-aafa-09dc70a4d1a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.512466] env[62585]: DEBUG nova.compute.manager [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 867.557552] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.565411] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384784, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.624808] env[62585]: DEBUG nova.network.neutron [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Successfully created port: 3473a7c7-91c3-423f-8e8e-36cd6ba107e2 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 867.699631] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384783, 'name': CreateVM_Task, 'duration_secs': 0.957016} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.700430] env[62585]: DEBUG nova.network.neutron [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 867.702045] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 867.702948] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.703143] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.703475] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 867.705977] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c697e67-de16-4a15-89d8-53352318ad68 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.710720] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 867.710720] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]521b892c-c5f2-ce89-115d-51323dd2624f" [ 867.710720] env[62585]: _type = "Task" [ 867.710720] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.718615] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]521b892c-c5f2-ce89-115d-51323dd2624f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.766397] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67f70db-6739-4f06-8901-6a35de323105 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.775763] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914d9b79-54ac-429c-b25a-5b0a1f8922aa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.806621] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dcc0421-a463-487f-b675-0daca9a54fe8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.818085] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e554d1-fe04-4a6d-b6a4-0258c80f5d49 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.831265] env[62585]: DEBUG nova.compute.provider_tree [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.852691] env[62585]: DEBUG nova.network.neutron [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Updating instance_info_cache with network_info: [{"id": "3ce4bc74-6a0f-47ca-a5cc-a709b648910d", "address": "fa:16:3e:56:b0:3e", "network": {"id": "2b85c6b0-fc8f-4275-94c8-9262d8ea21cd", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-609771769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34c6f21d288e47dd94ccbe12526fe4e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ce4bc74-6a", "ovs_interfaceid": "3ce4bc74-6a0f-47ca-a5cc-a709b648910d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.867105] env[62585]: INFO nova.compute.manager [-] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Took 1.21 seconds to deallocate network for instance. 
[ 868.062164] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384784, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559625} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.062450] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] b2d2a012-a62f-4237-95c3-d7153d6b223c/b2d2a012-a62f-4237-95c3-d7153d6b223c.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 868.062665] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 868.062917] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e67ca9b-6009-4778-80dd-e0df7be9ba7b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.069758] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 868.069758] env[62585]: value = "task-1384785" [ 868.069758] env[62585]: _type = "Task" [ 868.069758] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.077788] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384785, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.221205] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]521b892c-c5f2-ce89-115d-51323dd2624f, 'name': SearchDatastore_Task, 'duration_secs': 0.010783} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.221205] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.221205] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.221606] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.221606] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.221606] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.223401] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51465455-fd17-4260-bcfc-e5e36cdaf2ad {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.233019] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.233019] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 868.233479] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-372122d1-249c-4aa1-a326-cc828ff350e4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.238624] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 868.238624] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e34946-d11d-c361-30d7-ac4597cc2709" [ 868.238624] env[62585]: _type = "Task" [ 868.238624] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.246296] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e34946-d11d-c361-30d7-ac4597cc2709, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.334259] env[62585]: DEBUG nova.scheduler.client.report [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 868.354371] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Releasing lock "refresh_cache-62e3b57b-6c9c-4f3c-8a47-efb5fbed801f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.354681] env[62585]: DEBUG nova.compute.manager [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Instance network_info: |[{"id": "3ce4bc74-6a0f-47ca-a5cc-a709b648910d", "address": "fa:16:3e:56:b0:3e", "network": {"id": "2b85c6b0-fc8f-4275-94c8-9262d8ea21cd", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-609771769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34c6f21d288e47dd94ccbe12526fe4e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ce4bc74-6a", "ovs_interfaceid": "3ce4bc74-6a0f-47ca-a5cc-a709b648910d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 868.355105] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:b0:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98e21102-8954-4f6f-b1e6-5d764a53aa22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3ce4bc74-6a0f-47ca-a5cc-a709b648910d', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 868.362589] env[62585]: DEBUG oslo.service.loopingcall [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 868.363907] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 868.363907] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fabef46e-62a6-4c1c-8057-c5124568401d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.379682] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.384876] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 868.384876] env[62585]: value = "task-1384786" [ 868.384876] env[62585]: _type = "Task" [ 868.384876] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.395360] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384786, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.524888] env[62585]: DEBUG nova.compute.manager [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 868.551285] env[62585]: DEBUG nova.virt.hardware [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=<?>,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T09:57:27Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 868.551595] env[62585]: DEBUG nova.virt.hardware [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 868.551789] env[62585]: DEBUG nova.virt.hardware [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 868.551975] env[62585]: DEBUG nova.virt.hardware [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 868.552126] env[62585]: DEBUG nova.virt.hardware [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 868.552323] env[62585]: DEBUG nova.virt.hardware [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 868.552532] env[62585]: DEBUG nova.virt.hardware [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 868.552715] env[62585]: DEBUG nova.virt.hardware [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 868.552897] env[62585]: DEBUG nova.virt.hardware [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 
tempest-ServersTestJSON-1776640796-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 868.553084] env[62585]: DEBUG nova.virt.hardware [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 868.553271] env[62585]: DEBUG nova.virt.hardware [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 868.554183] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e058bc-9736-43ea-a320-6e53f6d212df {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.563030] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d645959-36b1-407e-a079-e90042051451 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.585961] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384785, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07459} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.586272] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 868.587071] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b3971fb-6ab9-4dd1-9c85-c38c6bd9c296 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.609622] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] b2d2a012-a62f-4237-95c3-d7153d6b223c/b2d2a012-a62f-4237-95c3-d7153d6b223c.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 868.609950] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f18e97b-6162-4024-af96-71b6dc79eff9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.629648] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 868.629648] env[62585]: value = "task-1384787" [ 868.629648] env[62585]: _type = "Task" [ 868.629648] 
env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.638678] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384787, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.641293] env[62585]: DEBUG nova.compute.manager [req-b9c0011e-0ecd-4ebc-8efb-e4387964f727 req-81f2ad4d-e312-4103-8392-6801af4354aa service nova] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Received event network-changed-3ce4bc74-6a0f-47ca-a5cc-a709b648910d {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 868.641579] env[62585]: DEBUG nova.compute.manager [req-b9c0011e-0ecd-4ebc-8efb-e4387964f727 req-81f2ad4d-e312-4103-8392-6801af4354aa service nova] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Refreshing instance network info cache due to event network-changed-3ce4bc74-6a0f-47ca-a5cc-a709b648910d. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 868.641867] env[62585]: DEBUG oslo_concurrency.lockutils [req-b9c0011e-0ecd-4ebc-8efb-e4387964f727 req-81f2ad4d-e312-4103-8392-6801af4354aa service nova] Acquiring lock "refresh_cache-62e3b57b-6c9c-4f3c-8a47-efb5fbed801f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.642051] env[62585]: DEBUG oslo_concurrency.lockutils [req-b9c0011e-0ecd-4ebc-8efb-e4387964f727 req-81f2ad4d-e312-4103-8392-6801af4354aa service nova] Acquired lock "refresh_cache-62e3b57b-6c9c-4f3c-8a47-efb5fbed801f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.642222] env[62585]: DEBUG nova.network.neutron [req-b9c0011e-0ecd-4ebc-8efb-e4387964f727 req-81f2ad4d-e312-4103-8392-6801af4354aa service nova] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Refreshing network info cache for port 3ce4bc74-6a0f-47ca-a5cc-a709b648910d {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 868.674416] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.749287] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e34946-d11d-c361-30d7-ac4597cc2709, 'name': SearchDatastore_Task, 'duration_secs': 0.020204} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.750265] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0378449-f010-424a-9e0c-83c1ec5aad7d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.755731] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 868.755731] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5229cf54-56eb-c546-d2bf-25842424bdff" [ 868.755731] env[62585]: _type = "Task" [ 868.755731] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.764170] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5229cf54-56eb-c546-d2bf-25842424bdff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.840308] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.346s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.840929] env[62585]: DEBUG nova.compute.manager [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 868.843843] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.600s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.844141] env[62585]: DEBUG nova.objects.instance [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lazy-loading 'resources' on Instance uuid 67e5af2f-4eec-41ec-916f-9f9b77596943 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 868.895486] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384786, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.073581] env[62585]: DEBUG nova.compute.manager [req-7671d94e-a4d7-4e63-b9af-096e89abc544 req-75ffa286-c635-4842-8ca2-67bfa5aa51ad service nova] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Received event network-vif-plugged-3473a7c7-91c3-423f-8e8e-36cd6ba107e2 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 869.073819] env[62585]: DEBUG oslo_concurrency.lockutils [req-7671d94e-a4d7-4e63-b9af-096e89abc544 req-75ffa286-c635-4842-8ca2-67bfa5aa51ad service nova] Acquiring lock "95de3c81-b764-4594-af86-66df7814d7aa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.074044] env[62585]: DEBUG oslo_concurrency.lockutils [req-7671d94e-a4d7-4e63-b9af-096e89abc544 req-75ffa286-c635-4842-8ca2-67bfa5aa51ad service nova] Lock "95de3c81-b764-4594-af86-66df7814d7aa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.074219] env[62585]: DEBUG oslo_concurrency.lockutils [req-7671d94e-a4d7-4e63-b9af-096e89abc544 req-75ffa286-c635-4842-8ca2-67bfa5aa51ad service nova] Lock "95de3c81-b764-4594-af86-66df7814d7aa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.074396] env[62585]: DEBUG nova.compute.manager [req-7671d94e-a4d7-4e63-b9af-096e89abc544 req-75ffa286-c635-4842-8ca2-67bfa5aa51ad service nova] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] No waiting events found dispatching network-vif-plugged-3473a7c7-91c3-423f-8e8e-36cd6ba107e2 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 869.074561] env[62585]: WARNING nova.compute.manager [req-7671d94e-a4d7-4e63-b9af-096e89abc544 req-75ffa286-c635-4842-8ca2-67bfa5aa51ad service nova] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Received unexpected event network-vif-plugged-3473a7c7-91c3-423f-8e8e-36cd6ba107e2 for instance with vm_state building and task_state spawning. [ 869.141066] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384787, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.179873] env[62585]: DEBUG nova.network.neutron [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Successfully updated port: 3473a7c7-91c3-423f-8e8e-36cd6ba107e2 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 869.270731] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5229cf54-56eb-c546-d2bf-25842424bdff, 'name': SearchDatastore_Task, 'duration_secs': 0.013677} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.271079] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.271360] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] d96a04d7-b07f-439d-aafa-09dc70a4d1a7/d96a04d7-b07f-439d-aafa-09dc70a4d1a7.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 869.271629] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69f28e02-e307-4e7f-b92d-1297aa45198a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.279096] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 869.279096] env[62585]: value = "task-1384788" [ 869.279096] env[62585]: _type = "Task" [ 869.279096] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.287138] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384788, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.347235] env[62585]: DEBUG nova.compute.utils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 869.351887] env[62585]: DEBUG nova.compute.manager [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 869.352286] env[62585]: DEBUG nova.network.neutron [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 869.397288] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384786, 'name': CreateVM_Task, 'duration_secs': 0.650714} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.397509] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 869.398205] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.398389] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.398962] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 869.401755] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5b9e1d8-7a24-41cc-b1d0-8ba9990e5c21 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.407101] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 869.407101] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5276bcf2-a31d-046c-f9ae-bce4315cf01c" [ 869.407101] env[62585]: _type = "Task" [ 869.407101] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.420851] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5276bcf2-a31d-046c-f9ae-bce4315cf01c, 'name': SearchDatastore_Task, 'duration_secs': 0.009896} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.420851] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.420851] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 869.421066] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.421624] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.421624] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 869.421624] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-557bc61d-1be3-491f-9954-f9b31f9d822d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.442096] env[62585]: DEBUG nova.policy [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28834cc42f8a49cebca5647badabf8ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c49ab537d42244f495aaa3cbdaafc6b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 869.445335] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 869.445572] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None 
req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 869.446346] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7502f57-a507-4aa3-b87f-71277210826e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.454300] env[62585]: DEBUG nova.network.neutron [req-b9c0011e-0ecd-4ebc-8efb-e4387964f727 req-81f2ad4d-e312-4103-8392-6801af4354aa service nova] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Updated VIF entry in instance network info cache for port 3ce4bc74-6a0f-47ca-a5cc-a709b648910d. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 869.454967] env[62585]: DEBUG nova.network.neutron [req-b9c0011e-0ecd-4ebc-8efb-e4387964f727 req-81f2ad4d-e312-4103-8392-6801af4354aa service nova] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Updating instance_info_cache with network_info: [{"id": "3ce4bc74-6a0f-47ca-a5cc-a709b648910d", "address": "fa:16:3e:56:b0:3e", "network": {"id": "2b85c6b0-fc8f-4275-94c8-9262d8ea21cd", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-609771769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34c6f21d288e47dd94ccbe12526fe4e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ce4bc74-6a", "ovs_interfaceid": "3ce4bc74-6a0f-47ca-a5cc-a709b648910d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.455974] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 869.455974] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5205fd36-705a-a180-f75c-d97c11b2fa5c" [ 869.455974] env[62585]: _type = "Task" [ 869.455974] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.464471] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5205fd36-705a-a180-f75c-d97c11b2fa5c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.598143] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37eb40e-b663-4d80-a2ca-dd5a8657c2da {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.608417] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6133af52-5dc0-4e8d-99e7-2bb0db47639f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.644796] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e085f96-feb0-4d5e-8182-7c48aa67eab5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.654460] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384787, 'name': ReconfigVM_Task, 'duration_secs': 0.573297} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.657080] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Reconfigured VM instance instance-0000004d to attach disk [datastore2] b2d2a012-a62f-4237-95c3-d7153d6b223c/b2d2a012-a62f-4237-95c3-d7153d6b223c.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 869.657908] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-05a3788c-2f04-4b4e-9dac-c46c857af063 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.661019] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc71347-db70-481d-afbf-8d29929d31b8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.677226] env[62585]: DEBUG nova.compute.provider_tree [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.680303] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 869.680303] env[62585]: value = "task-1384789" [ 869.680303] env[62585]: _type = "Task" [ 869.680303] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.684829] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "refresh_cache-95de3c81-b764-4594-af86-66df7814d7aa" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.684829] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "refresh_cache-95de3c81-b764-4594-af86-66df7814d7aa" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.684829] env[62585]: DEBUG nova.network.neutron [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 869.694380] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384789, 'name': Rename_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.749244] env[62585]: DEBUG nova.network.neutron [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Successfully created port: 969d2b65-14d8-4ce4-b801-2bdc9e536e20 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 869.789610] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384788, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483093} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.789872] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] d96a04d7-b07f-439d-aafa-09dc70a4d1a7/d96a04d7-b07f-439d-aafa-09dc70a4d1a7.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 869.790111] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 869.790424] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a86df526-0416-4b13-8909-535020b7debb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.796798] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 869.796798] env[62585]: value = "task-1384790" [ 869.796798] env[62585]: _type = "Task" [ 869.796798] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.804523] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384790, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.852973] env[62585]: DEBUG nova.compute.manager [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 869.957217] env[62585]: DEBUG oslo_concurrency.lockutils [req-b9c0011e-0ecd-4ebc-8efb-e4387964f727 req-81f2ad4d-e312-4103-8392-6801af4354aa service nova] Releasing lock "refresh_cache-62e3b57b-6c9c-4f3c-8a47-efb5fbed801f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.957547] env[62585]: DEBUG nova.compute.manager [req-b9c0011e-0ecd-4ebc-8efb-e4387964f727 req-81f2ad4d-e312-4103-8392-6801af4354aa service nova] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Received event network-vif-deleted-812678f9-ea0f-4419-9b6b-98690022e9cc {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 869.967225] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5205fd36-705a-a180-f75c-d97c11b2fa5c, 'name': SearchDatastore_Task, 'duration_secs': 0.065691} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.967977] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1ca972f-0406-4cdd-9b82-992b12fa392e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.972870] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 869.972870] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52372f6d-d740-841e-fd7c-e1c2a1bfc0a5" [ 869.972870] env[62585]: _type = "Task" [ 869.972870] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.979997] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52372f6d-d740-841e-fd7c-e1c2a1bfc0a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.182133] env[62585]: DEBUG nova.scheduler.client.report [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 870.196227] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384789, 'name': Rename_Task, 'duration_secs': 0.157663} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.197227] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.197429] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b3652de-5377-4275-a187-454d7560281c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.204268] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 870.204268] env[62585]: value = "task-1384791" [ 870.204268] env[62585]: _type = "Task" [ 870.204268] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.212132] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384791, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.223936] env[62585]: DEBUG nova.network.neutron [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 870.308379] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384790, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068282} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.308666] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 870.309368] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb714308-db69-4fb1-9525-4f2ee2c44953 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.332739] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] d96a04d7-b07f-439d-aafa-09dc70a4d1a7/d96a04d7-b07f-439d-aafa-09dc70a4d1a7.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 870.333437] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5019efdf-7c53-4061-bb0b-6c8f6b177979 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.354968] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 870.354968] env[62585]: value = "task-1384792" [ 870.354968] env[62585]: _type = "Task" [ 870.354968] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.366777] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384792, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.386875] env[62585]: DEBUG nova.network.neutron [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Updating instance_info_cache with network_info: [{"id": "3473a7c7-91c3-423f-8e8e-36cd6ba107e2", "address": "fa:16:3e:a4:11:8c", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3473a7c7-91", "ovs_interfaceid": "3473a7c7-91c3-423f-8e8e-36cd6ba107e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.482900] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52372f6d-d740-841e-fd7c-e1c2a1bfc0a5, 'name': SearchDatastore_Task, 'duration_secs': 0.009034} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.483213] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.483512] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f/62e3b57b-6c9c-4f3c-8a47-efb5fbed801f.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 870.483782] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a216834-8933-44e3-8c07-24b695eab4e5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.491409] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 870.491409] env[62585]: value = "task-1384793" [ 870.491409] env[62585]: _type = "Task" [ 870.491409] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.499391] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384793, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.690065] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.846s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.692505] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.309s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.692764] env[62585]: DEBUG nova.objects.instance [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lazy-loading 'resources' on Instance uuid d644c700-c5d1-4549-b73b-0573f268dc40 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 870.710212] env[62585]: INFO nova.scheduler.client.report [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Deleted allocations for instance 67e5af2f-4eec-41ec-916f-9f9b77596943 [ 870.718167] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384791, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.863454] env[62585]: DEBUG nova.compute.manager [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 870.871217] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384792, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.891393] env[62585]: DEBUG nova.virt.hardware [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 870.891688] env[62585]: DEBUG nova.virt.hardware [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 870.891855] env[62585]: DEBUG nova.virt.hardware [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 870.892058] env[62585]: DEBUG nova.virt.hardware [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 870.892216] env[62585]: DEBUG nova.virt.hardware [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 870.892370] env[62585]: DEBUG nova.virt.hardware [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 870.892585] env[62585]: DEBUG nova.virt.hardware [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 870.892746] env[62585]: DEBUG nova.virt.hardware [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 870.892920] env[62585]: DEBUG nova.virt.hardware [None 
req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 870.893103] env[62585]: DEBUG nova.virt.hardware [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 870.893461] env[62585]: DEBUG nova.virt.hardware [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 870.893762] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "refresh_cache-95de3c81-b764-4594-af86-66df7814d7aa" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.894059] env[62585]: DEBUG nova.compute.manager [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Instance network_info: |[{"id": "3473a7c7-91c3-423f-8e8e-36cd6ba107e2", "address": "fa:16:3e:a4:11:8c", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3473a7c7-91", "ovs_interfaceid": "3473a7c7-91c3-423f-8e8e-36cd6ba107e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 870.894978] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14827ace-955e-4289-a0bd-3c9e8fcc0afc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.897796] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:11:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40859343-2baa-45fd-88e3-ebf8aaed2b19', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3473a7c7-91c3-423f-8e8e-36cd6ba107e2', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 870.905600] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Creating folder: Project (a8121e0a00494834a580b940d36e0160). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 870.905986] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-74ece85b-5e88-4d20-b65d-3af88dee026e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.915570] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039d5759-cd1f-4096-ac8e-0c25460253f4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.920882] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Created folder: Project (a8121e0a00494834a580b940d36e0160) in parent group-v293962. [ 870.921133] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Creating folder: Instances. Parent ref: group-v294020. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 870.921793] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-783ef3f5-8065-4f51-86f4-b8df5a9fdf4c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.933897] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Created folder: Instances in parent group-v294020. [ 870.934285] env[62585]: DEBUG oslo.service.loopingcall [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 870.934567] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 870.934805] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ad84f983-afeb-462a-a527-a1b599ebc48d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.955029] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 870.955029] env[62585]: value = "task-1384796" [ 870.955029] env[62585]: _type = "Task" [ 870.955029] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.966077] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384796, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.003722] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384793, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.120567] env[62585]: DEBUG nova.compute.manager [req-0433e99f-1ba8-4721-87d5-6a00fcaf6e5a req-5e3a38ae-e080-4ca3-a50c-ed3a223a1207 service nova] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Received event network-changed-3473a7c7-91c3-423f-8e8e-36cd6ba107e2 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 871.120567] env[62585]: DEBUG nova.compute.manager [req-0433e99f-1ba8-4721-87d5-6a00fcaf6e5a req-5e3a38ae-e080-4ca3-a50c-ed3a223a1207 service nova] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Refreshing instance network info cache due to event network-changed-3473a7c7-91c3-423f-8e8e-36cd6ba107e2. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 871.120567] env[62585]: DEBUG oslo_concurrency.lockutils [req-0433e99f-1ba8-4721-87d5-6a00fcaf6e5a req-5e3a38ae-e080-4ca3-a50c-ed3a223a1207 service nova] Acquiring lock "refresh_cache-95de3c81-b764-4594-af86-66df7814d7aa" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.121013] env[62585]: DEBUG oslo_concurrency.lockutils [req-0433e99f-1ba8-4721-87d5-6a00fcaf6e5a req-5e3a38ae-e080-4ca3-a50c-ed3a223a1207 service nova] Acquired lock "refresh_cache-95de3c81-b764-4594-af86-66df7814d7aa" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.121013] env[62585]: DEBUG nova.network.neutron [req-0433e99f-1ba8-4721-87d5-6a00fcaf6e5a req-5e3a38ae-e080-4ca3-a50c-ed3a223a1207 service nova] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Refreshing network info cache for port 3473a7c7-91c3-423f-8e8e-36cd6ba107e2 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 871.217119] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384791, 'name': PowerOnVM_Task} progress is 82%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.220066] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9872b33f-993a-4b6b-a880-fe686d38228f tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "67e5af2f-4eec-41ec-916f-9f9b77596943" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.950s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.335038] env[62585]: DEBUG nova.network.neutron [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Successfully updated port: 969d2b65-14d8-4ce4-b801-2bdc9e536e20 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 871.366141] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384792, 'name': ReconfigVM_Task, 'duration_secs': 0.771137} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.366328] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Reconfigured VM instance instance-0000004e to attach disk [datastore2] d96a04d7-b07f-439d-aafa-09dc70a4d1a7/d96a04d7-b07f-439d-aafa-09dc70a4d1a7.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 871.366953] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b27816fd-9c50-47ab-a58e-3337431b9820 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.374170] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 871.374170] env[62585]: value = "task-1384797" [ 871.374170] env[62585]: _type = "Task" [ 871.374170] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.383973] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384797, 'name': Rename_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.420373] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad60b0e9-3897-4624-9334-d63bfcb0721b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.427671] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5268651d-a4b5-4d28-b8dd-5000318b4412 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.461014] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29d81ee-3cf6-448f-8834-5ab6993df3e8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.470956] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3eaa850-1bf0-4d7d-9963-17a18c45ff5b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.474408] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384796, 'name': CreateVM_Task, 'duration_secs': 0.364704} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.474574] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 871.475550] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.475721] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.476097] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 871.476309] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1597bcc3-ef74-4251-9223-77491f7c8b78 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.486905] env[62585]: DEBUG nova.compute.provider_tree [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.490730] env[62585]: DEBUG oslo_vmware.api [None 
req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 871.490730] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526ae809-b125-a02c-929e-698ea8550631" [ 871.490730] env[62585]: _type = "Task" [ 871.490730] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.500486] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526ae809-b125-a02c-929e-698ea8550631, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.503532] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384793, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.639374} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.503763] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f/62e3b57b-6c9c-4f3c-8a47-efb5fbed801f.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 871.503973] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 871.504212] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a10076b-0cec-4bb1-982e-65af53c8eb3e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.509525] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 871.509525] env[62585]: value = "task-1384798" [ 871.509525] env[62585]: _type = "Task" [ 871.509525] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.516268] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384798, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.715538] env[62585]: DEBUG oslo_vmware.api [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384791, 'name': PowerOnVM_Task, 'duration_secs': 1.113152} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.715802] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 871.716014] env[62585]: INFO nova.compute.manager [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Took 10.23 seconds to spawn the instance on the hypervisor. [ 871.716211] env[62585]: DEBUG nova.compute.manager [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 871.716951] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83dade05-6901-48bf-9b99-0880c875704b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.840549] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "refresh_cache-f1bfef38-b6d0-40d0-8e60-310f8a75dd78" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.840707] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "refresh_cache-f1bfef38-b6d0-40d0-8e60-310f8a75dd78" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.840859] env[62585]: DEBUG nova.network.neutron [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 871.849697] env[62585]: DEBUG nova.network.neutron [req-0433e99f-1ba8-4721-87d5-6a00fcaf6e5a req-5e3a38ae-e080-4ca3-a50c-ed3a223a1207 service nova] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Updated VIF entry in instance network info cache for port 3473a7c7-91c3-423f-8e8e-36cd6ba107e2. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 871.850064] env[62585]: DEBUG nova.network.neutron [req-0433e99f-1ba8-4721-87d5-6a00fcaf6e5a req-5e3a38ae-e080-4ca3-a50c-ed3a223a1207 service nova] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Updating instance_info_cache with network_info: [{"id": "3473a7c7-91c3-423f-8e8e-36cd6ba107e2", "address": "fa:16:3e:a4:11:8c", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3473a7c7-91", "ovs_interfaceid": "3473a7c7-91c3-423f-8e8e-36cd6ba107e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.883760] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384797, 'name': Rename_Task} progress is 99%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.989994] env[62585]: DEBUG nova.scheduler.client.report [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 872.002747] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526ae809-b125-a02c-929e-698ea8550631, 'name': SearchDatastore_Task, 'duration_secs': 0.008528} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.003110] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.003393] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 872.003636] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.003824] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.004055] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 872.004342] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e8083995-ae1b-402e-8a36-57678d09ec96 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.014450] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 872.014669] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 872.015859] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14c27932-31bf-4483-be7f-6f7e788aedfa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.020699] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384798, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057825} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.021259] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 872.021991] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f221d844-5be5-4b0f-8eba-4e12815b4a83 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.025370] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 872.025370] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526f914b-da7b-1945-7873-2250e01fec1c" [ 872.025370] env[62585]: _type = "Task" [ 872.025370] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.045092] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f/62e3b57b-6c9c-4f3c-8a47-efb5fbed801f.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 872.045868] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ce52b3d-c672-44e8-8d1f-3d7e3141168a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.063735] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526f914b-da7b-1945-7873-2250e01fec1c, 'name': SearchDatastore_Task, 'duration_secs': 0.008455} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.064704] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0faf4844-a269-4af5-93d9-ca9f871bc06d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.070437] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 872.070437] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52cfd4d4-a782-4b29-7aa6-e2a89a829020" [ 872.070437] env[62585]: _type = "Task" [ 872.070437] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.070687] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 872.070687] env[62585]: value = "task-1384799" [ 872.070687] env[62585]: _type = "Task" [ 872.070687] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.080738] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384799, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.083361] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52cfd4d4-a782-4b29-7aa6-e2a89a829020, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.236890] env[62585]: INFO nova.compute.manager [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Took 30.11 seconds to build instance. [ 872.353286] env[62585]: DEBUG oslo_concurrency.lockutils [req-0433e99f-1ba8-4721-87d5-6a00fcaf6e5a req-5e3a38ae-e080-4ca3-a50c-ed3a223a1207 service nova] Releasing lock "refresh_cache-95de3c81-b764-4594-af86-66df7814d7aa" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.378886] env[62585]: DEBUG nova.network.neutron [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 872.387127] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384797, 'name': Rename_Task} progress is 99%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.498409] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.806s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.500847] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.045s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.502560] env[62585]: INFO nova.compute.claims [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 872.518807] env[62585]: DEBUG nova.network.neutron [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Updating instance_info_cache with network_info: [{"id": "969d2b65-14d8-4ce4-b801-2bdc9e536e20", "address": "fa:16:3e:df:d6:69", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap969d2b65-14", "ovs_interfaceid": "969d2b65-14d8-4ce4-b801-2bdc9e536e20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.520703] env[62585]: INFO nova.scheduler.client.report [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Deleted allocations for instance d644c700-c5d1-4549-b73b-0573f268dc40 [ 872.585162] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52cfd4d4-a782-4b29-7aa6-e2a89a829020, 'name': SearchDatastore_Task, 'duration_secs': 0.010673} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.588979] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.588979] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 95de3c81-b764-4594-af86-66df7814d7aa/95de3c81-b764-4594-af86-66df7814d7aa.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 872.589300] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384799, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.589561] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a88accd-7e1b-47f5-820c-5fd58df42f78 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.595695] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 872.595695] env[62585]: value = "task-1384800" [ 872.595695] env[62585]: _type = "Task" [ 872.595695] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.603815] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384800, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.739186] env[62585]: DEBUG oslo_concurrency.lockutils [None req-eee2249f-b0e4-4863-a09d-11c8ae0fe3f8 tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "b2d2a012-a62f-4237-95c3-d7153d6b223c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.846s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.885411] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384797, 'name': Rename_Task, 'duration_secs': 1.147271} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.885745] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.886040] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d68d9a0d-3398-4b0b-a9f4-efd3616fa047 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.892482] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 872.892482] env[62585]: value = "task-1384801" [ 872.892482] env[62585]: _type = "Task" [ 872.892482] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.901040] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384801, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.022406] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "refresh_cache-f1bfef38-b6d0-40d0-8e60-310f8a75dd78" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.022709] env[62585]: DEBUG nova.compute.manager [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Instance network_info: |[{"id": "969d2b65-14d8-4ce4-b801-2bdc9e536e20", "address": "fa:16:3e:df:d6:69", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap969d2b65-14", "ovs_interfaceid": "969d2b65-14d8-4ce4-b801-2bdc9e536e20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 873.023429] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None 
req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:d6:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '969d2b65-14d8-4ce4-b801-2bdc9e536e20', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 873.031508] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Creating folder: Project (c49ab537d42244f495aaa3cbdaafc6b6). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 873.034231] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d8ffe98-d77f-436d-9091-dc1c07edf2b6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.037525] env[62585]: DEBUG oslo_concurrency.lockutils [None req-f88c54c8-f813-4768-b29c-a48191a06f63 tempest-MultipleCreateTestJSON-865506198 tempest-MultipleCreateTestJSON-865506198-project-member] Lock "d644c700-c5d1-4549-b73b-0573f268dc40" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.694s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.048171] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Created folder: Project (c49ab537d42244f495aaa3cbdaafc6b6) in parent group-v293962. [ 873.048447] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Creating folder: Instances. Parent ref: group-v294023. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 873.048735] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32b699b9-6463-434b-8114-bc0edfe4348a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.059669] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Created folder: Instances in parent group-v294023. [ 873.059995] env[62585]: DEBUG oslo.service.loopingcall [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 873.060509] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 873.060785] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-198d9049-5c1e-47ed-8e69-05ea599b94bd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.084051] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384799, 'name': ReconfigVM_Task, 'duration_secs': 0.687996} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.085336] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f/62e3b57b-6c9c-4f3c-8a47-efb5fbed801f.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 873.085920] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 873.085920] env[62585]: value = "task-1384804" [ 873.085920] env[62585]: _type = "Task" [ 873.085920] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.086232] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-88c37c31-0ee1-4662-b5a9-6c7a684ad92c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.097162] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384804, 'name': CreateVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.101047] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 873.101047] env[62585]: value = "task-1384805" [ 873.101047] env[62585]: _type = "Task" [ 873.101047] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.106993] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384800, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.442203} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.107593] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 95de3c81-b764-4594-af86-66df7814d7aa/95de3c81-b764-4594-af86-66df7814d7aa.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 873.107895] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 873.108190] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70ba90c1-c9d8-4419-bc43-54e11b7d7866 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.113837] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384805, 'name': Rename_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.115239] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 873.115239] env[62585]: value = "task-1384806" [ 873.115239] env[62585]: _type = "Task" [ 873.115239] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.123327] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384806, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.149010] env[62585]: DEBUG nova.compute.manager [req-9d8e0dce-706c-447d-8193-cc08b58e47c5 req-fff6a7b3-fcad-456e-bf17-e7ebf259f0ee service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Received event network-vif-plugged-969d2b65-14d8-4ce4-b801-2bdc9e536e20 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 873.149318] env[62585]: DEBUG oslo_concurrency.lockutils [req-9d8e0dce-706c-447d-8193-cc08b58e47c5 req-fff6a7b3-fcad-456e-bf17-e7ebf259f0ee service nova] Acquiring lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.149555] env[62585]: DEBUG oslo_concurrency.lockutils [req-9d8e0dce-706c-447d-8193-cc08b58e47c5 req-fff6a7b3-fcad-456e-bf17-e7ebf259f0ee service nova] Lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.149730] env[62585]: DEBUG oslo_concurrency.lockutils [req-9d8e0dce-706c-447d-8193-cc08b58e47c5 req-fff6a7b3-fcad-456e-bf17-e7ebf259f0ee service nova] Lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.149906] env[62585]: DEBUG nova.compute.manager [req-9d8e0dce-706c-447d-8193-cc08b58e47c5 req-fff6a7b3-fcad-456e-bf17-e7ebf259f0ee service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] No waiting events found dispatching network-vif-plugged-969d2b65-14d8-4ce4-b801-2bdc9e536e20 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 873.150221] env[62585]: WARNING nova.compute.manager [req-9d8e0dce-706c-447d-8193-cc08b58e47c5 req-fff6a7b3-fcad-456e-bf17-e7ebf259f0ee service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Received unexpected event network-vif-plugged-969d2b65-14d8-4ce4-b801-2bdc9e536e20 for instance with vm_state building and task_state spawning. [ 873.150461] env[62585]: DEBUG nova.compute.manager [req-9d8e0dce-706c-447d-8193-cc08b58e47c5 req-fff6a7b3-fcad-456e-bf17-e7ebf259f0ee service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Received event network-changed-969d2b65-14d8-4ce4-b801-2bdc9e536e20 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 873.150804] env[62585]: DEBUG nova.compute.manager [req-9d8e0dce-706c-447d-8193-cc08b58e47c5 req-fff6a7b3-fcad-456e-bf17-e7ebf259f0ee service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Refreshing instance network info cache due to event network-changed-969d2b65-14d8-4ce4-b801-2bdc9e536e20. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 873.151062] env[62585]: DEBUG oslo_concurrency.lockutils [req-9d8e0dce-706c-447d-8193-cc08b58e47c5 req-fff6a7b3-fcad-456e-bf17-e7ebf259f0ee service nova] Acquiring lock "refresh_cache-f1bfef38-b6d0-40d0-8e60-310f8a75dd78" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.151246] env[62585]: DEBUG oslo_concurrency.lockutils [req-9d8e0dce-706c-447d-8193-cc08b58e47c5 req-fff6a7b3-fcad-456e-bf17-e7ebf259f0ee service nova] Acquired lock "refresh_cache-f1bfef38-b6d0-40d0-8e60-310f8a75dd78" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.151467] env[62585]: DEBUG nova.network.neutron [req-9d8e0dce-706c-447d-8193-cc08b58e47c5 req-fff6a7b3-fcad-456e-bf17-e7ebf259f0ee service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Refreshing network info cache for port 969d2b65-14d8-4ce4-b801-2bdc9e536e20 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 873.403901] env[62585]: DEBUG oslo_vmware.api [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384801, 'name': PowerOnVM_Task, 'duration_secs': 0.495587} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.403901] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.403901] env[62585]: INFO nova.compute.manager [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Took 9.57 seconds to spawn the instance on the hypervisor. [ 873.403901] env[62585]: DEBUG nova.compute.manager [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 873.404576] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d321cd5-d42b-455f-9999-a5945cf961e5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.596880] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384804, 'name': CreateVM_Task} progress is 25%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.609804] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384805, 'name': Rename_Task, 'duration_secs': 0.145565} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.612077] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 873.612466] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-affe5b84-eaa1-420b-942c-e5cc666c4918 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.620046] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 873.620046] env[62585]: value = "task-1384807" [ 873.620046] env[62585]: _type = "Task" [ 873.620046] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.627802] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384806, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076176} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.628497] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 873.629269] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc4a9bf-1795-40ea-bfd5-cbe87dcb0183 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.634290] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384807, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.655970] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 95de3c81-b764-4594-af86-66df7814d7aa/95de3c81-b764-4594-af86-66df7814d7aa.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 873.659683] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f74faa9-7449-4a3a-8ffe-426810c1aecc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.681271] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 873.681271] env[62585]: value = "task-1384808" [ 873.681271] env[62585]: _type = "Task" [ 873.681271] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.691657] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384808, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.730698] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c600fc-b570-4bc8-a91f-756a86789ec8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.739361] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a476ff9d-90b8-4ca6-afff-ad151214ecae {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.773700] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b4f3cb-9826-49ff-b9fd-7764fd46bf5b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.781658] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9422623b-a2e3-484f-aa59-bf42f9d62e6e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.795716] env[62585]: DEBUG nova.compute.provider_tree [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 873.806770] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "b2d2a012-a62f-4237-95c3-d7153d6b223c" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.806770] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "b2d2a012-a62f-4237-95c3-d7153d6b223c" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.806990] env[62585]: INFO nova.compute.manager [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Rebooting instance [ 873.923803] env[62585]: INFO nova.compute.manager [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Took 27.82 seconds to build instance. [ 873.996664] env[62585]: DEBUG nova.network.neutron [req-9d8e0dce-706c-447d-8193-cc08b58e47c5 req-fff6a7b3-fcad-456e-bf17-e7ebf259f0ee service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Updated VIF entry in instance network info cache for port 969d2b65-14d8-4ce4-b801-2bdc9e536e20. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 873.997214] env[62585]: DEBUG nova.network.neutron [req-9d8e0dce-706c-447d-8193-cc08b58e47c5 req-fff6a7b3-fcad-456e-bf17-e7ebf259f0ee service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Updating instance_info_cache with network_info: [{"id": "969d2b65-14d8-4ce4-b801-2bdc9e536e20", "address": "fa:16:3e:df:d6:69", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap969d2b65-14", "ovs_interfaceid": "969d2b65-14d8-4ce4-b801-2bdc9e536e20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.098161] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384804, 'name': CreateVM_Task} progress is 25%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.134258] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384807, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.195209] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384808, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.303113] env[62585]: DEBUG nova.scheduler.client.report [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 874.339692] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "refresh_cache-b2d2a012-a62f-4237-95c3-d7153d6b223c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.339899] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquired lock "refresh_cache-b2d2a012-a62f-4237-95c3-d7153d6b223c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.340093] env[62585]: DEBUG nova.network.neutron [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 874.426521] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fef5a6eb-f532-4025-858e-054d0624879b tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.236s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.500260] env[62585]: DEBUG oslo_concurrency.lockutils [req-9d8e0dce-706c-447d-8193-cc08b58e47c5 req-fff6a7b3-fcad-456e-bf17-e7ebf259f0ee service nova] Releasing lock "refresh_cache-f1bfef38-b6d0-40d0-8e60-310f8a75dd78" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.602825] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384804, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.631013] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384807, 'name': PowerOnVM_Task} progress is 37%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.691364] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384808, 'name': ReconfigVM_Task, 'duration_secs': 0.55633} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.691698] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 95de3c81-b764-4594-af86-66df7814d7aa/95de3c81-b764-4594-af86-66df7814d7aa.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 874.692397] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-02553d3a-e0e9-4bd7-994b-37c20087d2d0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.698814] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 874.698814] env[62585]: value = "task-1384809" [ 874.698814] env[62585]: _type = "Task" [ 874.698814] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.709868] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384809, 'name': Rename_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.810605] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.310s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.811161] env[62585]: DEBUG nova.compute.manager [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 874.814827] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.431s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.814827] env[62585]: DEBUG nova.objects.instance [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lazy-loading 'resources' on Instance uuid d2c6418c-b070-4c46-824b-18638e9b569f {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 875.102698] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384804, 'name': CreateVM_Task, 'duration_secs': 1.630185} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.103622] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 875.106015] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.106015] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.106015] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 875.106015] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ee03047-9b09-4d6c-bd73-8edf6d74879b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.111371] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 875.111371] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52dc80ea-bad2-a5c5-f5ce-24a453db845c" [ 875.111371] env[62585]: _type = "Task" [ 875.111371] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.120799] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52dc80ea-bad2-a5c5-f5ce-24a453db845c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.132169] env[62585]: DEBUG oslo_vmware.api [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384807, 'name': PowerOnVM_Task, 'duration_secs': 1.486127} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.132615] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 875.133023] env[62585]: INFO nova.compute.manager [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Took 8.99 seconds to spawn the instance on the hypervisor. [ 875.133557] env[62585]: DEBUG nova.compute.manager [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 875.137030] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a44727f-4e46-40f6-961d-5118c1f3a0a8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.170116] env[62585]: DEBUG nova.network.neutron [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Updating instance_info_cache with network_info: [{"id": "825c7c78-f998-4431-87b7-55f49c79830f", "address": "fa:16:3e:a6:7e:37", "network": {"id": "8d1518c7-e8f5-4297-9bb4-b9b4a16a1481", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454559249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c76fd292d84bbe97c7221e75831fbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825c7c78-f9", "ovs_interfaceid": "825c7c78-f998-4431-87b7-55f49c79830f", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.180150] env[62585]: DEBUG nova.compute.manager [req-83017d96-9982-4862-b664-10d80227b6e8 req-c743c135-42c9-4f21-94c9-72b3e4af395d service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Received event network-changed-825c7c78-f998-4431-87b7-55f49c79830f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 875.180150] env[62585]: DEBUG nova.compute.manager [req-83017d96-9982-4862-b664-10d80227b6e8 req-c743c135-42c9-4f21-94c9-72b3e4af395d service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Refreshing instance network info cache due to event network-changed-825c7c78-f998-4431-87b7-55f49c79830f. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 875.180150] env[62585]: DEBUG oslo_concurrency.lockutils [req-83017d96-9982-4862-b664-10d80227b6e8 req-c743c135-42c9-4f21-94c9-72b3e4af395d service nova] Acquiring lock "refresh_cache-b2d2a012-a62f-4237-95c3-d7153d6b223c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.210927] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384809, 'name': Rename_Task, 'duration_secs': 0.154461} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.211307] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 875.211519] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23c0ac55-cbab-41a6-becf-116703c0acde {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.217916] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 875.217916] env[62585]: value = "task-1384810" [ 875.217916] env[62585]: _type = "Task" [ 875.217916] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.226515] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384810, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.317112] env[62585]: DEBUG nova.compute.utils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 875.321731] env[62585]: DEBUG nova.compute.manager [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 875.321797] env[62585]: DEBUG nova.network.neutron [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 875.376089] env[62585]: DEBUG nova.policy [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f26abf4eaa71482b8fd3c6425a9c683d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48929b5f0c2c41ddade223ab57002fc4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 875.579957] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa18d30a-de19-434b-9fad-918ee8318e63 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.593372] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6da702-87c4-455d-9af2-201c574b709a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.597324] env[62585]: DEBUG nova.compute.manager [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Stashing vm_state: active {{(pid=62585) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 875.632909] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c296d73-a9d6-4921-b88f-7b1180ee6488 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.640915] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52dc80ea-bad2-a5c5-f5ce-24a453db845c, 'name': SearchDatastore_Task, 'duration_secs': 0.021122} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.643549] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.643854] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 875.644465] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.644465] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.644586] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 875.644806] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0118b7e3-7800-43fc-a8f0-dcfa107e79f9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.647792] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec471535-86b8-4faa-b1cd-57c7739129c4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.667581] env[62585]: DEBUG nova.compute.provider_tree [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.673019] env[62585]: INFO nova.compute.manager [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Took 28.84 seconds to build instance. 
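The recurring "Waiting for the task: (returnval){ value = "task-13848xx" ... }" and "_poll_task ... progress is N%" entries above all come from oslo.vmware's task-polling loop: a vSphere *_Task method is invoked through the session, and wait_for_task() then polls the returned task object until it completes or fails. A minimal illustrative sketch of that pattern follows; the vCenter host, credentials, and vm_ref are placeholders (not values from this run), and this shows the generic oslo.vmware usage rather than the exact helpers that nova.virt.vmwareapi.vm_util layers on top of it.

# Minimal sketch of the invoke-then-poll pattern seen throughout this trace
# (e.g. PowerOnVM_Task or CopyVirtualDisk_Task followed by wait_for_task).
# Host, credentials and vm_ref are placeholders, not values from this run.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vcenter.example.org',            # placeholder vCenter host
    'administrator@vsphere.local',    # placeholder username
    'secret',                         # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)           # poll task state twice per second

def power_on(vm_ref):
    # Invoking a *_Task vSphere method returns a Task managed object
    # reference (the "task-13848xx" values logged above).
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() blocks, polling the task's state and progress -- the
    # "_poll_task ... progress is N%" lines -- until it succeeds or raises.
    return session.wait_for_task(task)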
[ 875.673019] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 875.673019] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 875.673803] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e8a3933-8e1c-4969-b76e-328da6327074 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.676462] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Releasing lock "refresh_cache-b2d2a012-a62f-4237-95c3-d7153d6b223c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.679240] env[62585]: DEBUG oslo_concurrency.lockutils [req-83017d96-9982-4862-b664-10d80227b6e8 req-c743c135-42c9-4f21-94c9-72b3e4af395d service nova] Acquired lock "refresh_cache-b2d2a012-a62f-4237-95c3-d7153d6b223c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.679434] env[62585]: DEBUG nova.network.neutron [req-83017d96-9982-4862-b664-10d80227b6e8 req-c743c135-42c9-4f21-94c9-72b3e4af395d service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Refreshing network info cache for port 825c7c78-f998-4431-87b7-55f49c79830f {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 875.681304] env[62585]: DEBUG nova.compute.manager [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 875.684907] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3332b0-bed5-43f3-a53a-92f33396eef6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.688706] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 875.688706] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52d25eb7-7192-e667-fa94-8981512f7b17" [ 875.688706] env[62585]: _type = "Task" [ 875.688706] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.701705] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52d25eb7-7192-e667-fa94-8981512f7b17, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.728033] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384810, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.817624] env[62585]: DEBUG nova.network.neutron [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Successfully created port: b29379d9-a516-40cd-b7f0-35505b917bcb {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 875.822770] env[62585]: DEBUG nova.compute.manager [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 875.923583] env[62585]: DEBUG oslo_concurrency.lockutils [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "62e3b57b-6c9c-4f3c-8a47-efb5fbed801f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.062187] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.120327] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.175318] env[62585]: DEBUG nova.scheduler.client.report [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 876.179503] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6722d544-f016-4764-ac74-b1c05a83f185 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "62e3b57b-6c9c-4f3c-8a47-efb5fbed801f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.365s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.179786] env[62585]: DEBUG oslo_concurrency.lockutils [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "62e3b57b-6c9c-4f3c-8a47-efb5fbed801f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.257s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.180044] env[62585]: DEBUG oslo_concurrency.lockutils [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "62e3b57b-6c9c-4f3c-8a47-efb5fbed801f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.180314] env[62585]: DEBUG oslo_concurrency.lockutils [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "62e3b57b-6c9c-4f3c-8a47-efb5fbed801f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.180532] env[62585]: DEBUG oslo_concurrency.lockutils [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "62e3b57b-6c9c-4f3c-8a47-efb5fbed801f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.183072] env[62585]: INFO nova.compute.manager [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Terminating instance [ 876.185022] env[62585]: DEBUG nova.compute.manager [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 876.185444] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 876.186294] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c5fec2-6db3-48e1-b577-3ae02a8174a8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.214275] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52d25eb7-7192-e667-fa94-8981512f7b17, 'name': SearchDatastore_Task, 'duration_secs': 0.019354} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.214639] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 876.215923] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a756850-ab32-4a1b-8879-df23d30d5dd6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.217774] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f66f207-72fe-4a38-ada7-3ecc6bf8c73a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.227987] env[62585]: DEBUG oslo_vmware.api [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 876.227987] env[62585]: value = "task-1384811" [ 876.227987] env[62585]: _type = "Task" [ 876.227987] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.234959] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 876.234959] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522630f7-6c47-4cfd-6f6a-fa931b4c8472" [ 876.234959] env[62585]: _type = "Task" [ 876.234959] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.235631] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384810, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.249310] env[62585]: DEBUG oslo_vmware.api [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384811, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.254496] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522630f7-6c47-4cfd-6f6a-fa931b4c8472, 'name': SearchDatastore_Task, 'duration_secs': 0.01112} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.254721] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.254986] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] f1bfef38-b6d0-40d0-8e60-310f8a75dd78/f1bfef38-b6d0-40d0-8e60-310f8a75dd78.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 876.255337] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb9a8c79-1fb6-4ac0-abe0-98f4b5e92fab {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.263221] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 876.263221] env[62585]: value = "task-1384812" [ 876.263221] env[62585]: _type = "Task" [ 876.263221] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.271760] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384812, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.498551] env[62585]: DEBUG nova.network.neutron [req-83017d96-9982-4862-b664-10d80227b6e8 req-c743c135-42c9-4f21-94c9-72b3e4af395d service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Updated VIF entry in instance network info cache for port 825c7c78-f998-4431-87b7-55f49c79830f. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 876.498735] env[62585]: DEBUG nova.network.neutron [req-83017d96-9982-4862-b664-10d80227b6e8 req-c743c135-42c9-4f21-94c9-72b3e4af395d service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Updating instance_info_cache with network_info: [{"id": "825c7c78-f998-4431-87b7-55f49c79830f", "address": "fa:16:3e:a6:7e:37", "network": {"id": "8d1518c7-e8f5-4297-9bb4-b9b4a16a1481", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454559249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c76fd292d84bbe97c7221e75831fbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825c7c78-f9", "ovs_interfaceid": "825c7c78-f998-4431-87b7-55f49c79830f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.680789] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.867s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.683327] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.126s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.685371] env[62585]: INFO nova.compute.claims [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 876.704494] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d01ae035-6484-4446-b823-615ea0b6420e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.709149] env[62585]: INFO nova.scheduler.client.report [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Deleted allocations for instance d2c6418c-b070-4c46-824b-18638e9b569f [ 876.715262] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Doing hard 
reboot of VM {{(pid=62585) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 876.715927] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "6057e13b-71df-458d-b6ed-c139a8c57836" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.716509] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "6057e13b-71df-458d-b6ed-c139a8c57836" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.716509] env[62585]: INFO nova.compute.manager [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Shelving [ 876.717948] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-d7fd2551-90c1-4d8d-8e2f-27e24950c41a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.728728] env[62585]: DEBUG oslo_vmware.api [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 876.728728] env[62585]: value = "task-1384813" [ 876.728728] env[62585]: _type = "Task" [ 876.728728] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.736364] env[62585]: DEBUG oslo_vmware.api [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384810, 'name': PowerOnVM_Task, 'duration_secs': 1.32794} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.741147] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 876.741684] env[62585]: INFO nova.compute.manager [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Took 8.22 seconds to spawn the instance on the hypervisor. 
[ 876.741684] env[62585]: DEBUG nova.compute.manager [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 876.746871] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff91338-8ea9-4511-aa85-9ec79b2807f0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.750773] env[62585]: DEBUG oslo_vmware.api [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384813, 'name': ResetVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.756951] env[62585]: DEBUG oslo_vmware.api [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384811, 'name': PowerOffVM_Task, 'duration_secs': 0.233108} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.759196] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 876.759454] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 876.763336] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20f2c0b0-4630-4a16-b3bf-e3a8a99f6400 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.776273] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384812, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.834458] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 876.834765] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 876.834939] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Deleting the datastore file [datastore2] 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 876.835292] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-277e3740-ecca-453f-91ed-47bca5320bc8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.838554] env[62585]: DEBUG nova.compute.manager [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 876.847164] env[62585]: DEBUG oslo_vmware.api [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 876.847164] env[62585]: value = "task-1384815" [ 876.847164] env[62585]: _type = "Task" [ 876.847164] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.856729] env[62585]: DEBUG oslo_vmware.api [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384815, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.869945] env[62585]: DEBUG nova.virt.hardware [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 876.869945] env[62585]: DEBUG nova.virt.hardware [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 876.869945] env[62585]: DEBUG nova.virt.hardware [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 876.869945] env[62585]: DEBUG nova.virt.hardware [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 876.869945] env[62585]: DEBUG nova.virt.hardware [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 876.870223] env[62585]: DEBUG nova.virt.hardware [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 876.870287] env[62585]: DEBUG nova.virt.hardware [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 876.870477] env[62585]: DEBUG nova.virt.hardware [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
876.870650] env[62585]: DEBUG nova.virt.hardware [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 876.870817] env[62585]: DEBUG nova.virt.hardware [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 876.870992] env[62585]: DEBUG nova.virt.hardware [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 876.871852] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e85f120-8559-4f37-97b5-2173a82554f6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.879339] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cccdfb39-a2e1-4389-98cb-6ee7e6dc9a9a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.004608] env[62585]: DEBUG oslo_concurrency.lockutils [req-83017d96-9982-4862-b664-10d80227b6e8 req-c743c135-42c9-4f21-94c9-72b3e4af395d service nova] Releasing lock "refresh_cache-b2d2a012-a62f-4237-95c3-d7153d6b223c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.058703] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 877.218787] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8aa459d5-e715-4a05-8dd5-d0f969f81580 tempest-ServersTestMultiNic-1082413995 tempest-ServersTestMultiNic-1082413995-project-member] Lock "d2c6418c-b070-4c46-824b-18638e9b569f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.047s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.224713] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 877.224996] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a50c074c-b14a-4832-81a7-5999f8780402 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.236098] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 877.236098] 
env[62585]: value = "task-1384816" [ 877.236098] env[62585]: _type = "Task" [ 877.236098] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.244544] env[62585]: DEBUG oslo_vmware.api [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384813, 'name': ResetVM_Task, 'duration_secs': 0.098202} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.245307] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Did hard reboot of VM {{(pid=62585) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 877.245569] env[62585]: DEBUG nova.compute.manager [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 877.246480] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc395392-d52d-48f0-8a69-08b1d601436d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.252543] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384816, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.277761] env[62585]: INFO nova.compute.manager [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Took 29.44 seconds to build instance. [ 877.282505] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384812, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.728003} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.282872] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] f1bfef38-b6d0-40d0-8e60-310f8a75dd78/f1bfef38-b6d0-40d0-8e60-310f8a75dd78.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 877.283205] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 877.283439] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4c62fbf-e03d-47c9-9239-1b8bea6f0cda {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.289664] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 877.289664] env[62585]: value = "task-1384817" [ 877.289664] env[62585]: _type = "Task" [ 877.289664] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.299788] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384817, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.311526] env[62585]: DEBUG nova.compute.manager [req-1df3b3bc-4913-4683-9a72-8d8b56c51b98 req-29f32fdd-415a-403b-8c3b-de3937f61e88 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Received event network-vif-plugged-b29379d9-a516-40cd-b7f0-35505b917bcb {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 877.311796] env[62585]: DEBUG oslo_concurrency.lockutils [req-1df3b3bc-4913-4683-9a72-8d8b56c51b98 req-29f32fdd-415a-403b-8c3b-de3937f61e88 service nova] Acquiring lock "ddb1103d-a846-4229-b441-de45424b4ec9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.312025] env[62585]: DEBUG oslo_concurrency.lockutils [req-1df3b3bc-4913-4683-9a72-8d8b56c51b98 req-29f32fdd-415a-403b-8c3b-de3937f61e88 service nova] Lock "ddb1103d-a846-4229-b441-de45424b4ec9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.312204] env[62585]: DEBUG oslo_concurrency.lockutils [req-1df3b3bc-4913-4683-9a72-8d8b56c51b98 req-29f32fdd-415a-403b-8c3b-de3937f61e88 service nova] Lock "ddb1103d-a846-4229-b441-de45424b4ec9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.312372] env[62585]: DEBUG nova.compute.manager [req-1df3b3bc-4913-4683-9a72-8d8b56c51b98 req-29f32fdd-415a-403b-8c3b-de3937f61e88 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] No waiting events found dispatching network-vif-plugged-b29379d9-a516-40cd-b7f0-35505b917bcb {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 877.312540] env[62585]: WARNING nova.compute.manager [req-1df3b3bc-4913-4683-9a72-8d8b56c51b98 req-29f32fdd-415a-403b-8c3b-de3937f61e88 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Received unexpected event network-vif-plugged-b29379d9-a516-40cd-b7f0-35505b917bcb for instance with vm_state building and task_state spawning. [ 877.356367] env[62585]: DEBUG oslo_vmware.api [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384815, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.408970] env[62585]: DEBUG nova.network.neutron [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Successfully updated port: b29379d9-a516-40cd-b7f0-35505b917bcb {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 877.749314] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384816, 'name': PowerOffVM_Task, 'duration_secs': 0.326181} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.751139] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 877.752990] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cf52b5-902b-4e13-9443-f1f3475e7754 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.778016] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1c390105-77de-42c1-b381-b524c830012d tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "b2d2a012-a62f-4237-95c3-d7153d6b223c" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.970s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.784741] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d3ac4e-4ea3-44ef-9964-36fcfe26c661 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.788488] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0e2a9272-a47c-48b7-97a3-60529d86cb8a tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "95de3c81-b764-4594-af86-66df7814d7aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.458s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.805567] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384817, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081441} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.805865] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 877.807176] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1aaa8e-c854-4daa-8bcd-bc60dc354537 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.833182] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] f1bfef38-b6d0-40d0-8e60-310f8a75dd78/f1bfef38-b6d0-40d0-8e60-310f8a75dd78.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 877.836716] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ff07ef3-9e3e-45b3-b767-506526fa3ad9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.865951] env[62585]: DEBUG oslo_vmware.api [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384815, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.531237} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.872138] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 877.872138] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 877.872138] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 877.872138] env[62585]: INFO nova.compute.manager [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Took 1.69 seconds to destroy the instance on the hypervisor. 
[ 877.872138] env[62585]: DEBUG oslo.service.loopingcall [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 877.872138] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 877.872138] env[62585]: value = "task-1384818" [ 877.872138] env[62585]: _type = "Task" [ 877.872138] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.872746] env[62585]: DEBUG nova.compute.manager [-] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 877.872746] env[62585]: DEBUG nova.network.neutron [-] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 877.884657] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384818, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.911609] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.911609] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.911609] env[62585]: DEBUG nova.network.neutron [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 877.975030] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739bb988-a411-4c7a-aeb9-3dc2715c7cc8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.983256] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2a6295-271a-4656-b776-58737a35e556 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.012515] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba901e1f-42f0-44fa-ae09-31cac28e3d4b 
{{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.020136] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487b47c1-c977-4830-92ec-76ebe937b871 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.033297] env[62585]: DEBUG nova.compute.provider_tree [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.062238] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 878.062411] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Starting heal instance info cache {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 878.062525] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Rebuilding the list of instances to heal {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 878.300426] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Creating Snapshot of the VM instance {{(pid=62585) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 878.300809] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-df6c6ac6-87bd-4490-99ee-73417b797a42 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.309341] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 878.309341] env[62585]: value = "task-1384819" [ 878.309341] env[62585]: _type = "Task" [ 878.309341] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.319216] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384819, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.370830] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "95de3c81-b764-4594-af86-66df7814d7aa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.370830] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "95de3c81-b764-4594-af86-66df7814d7aa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.370830] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "95de3c81-b764-4594-af86-66df7814d7aa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.370830] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "95de3c81-b764-4594-af86-66df7814d7aa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.370830] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "95de3c81-b764-4594-af86-66df7814d7aa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.376022] env[62585]: INFO nova.compute.manager [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Terminating instance [ 878.376022] env[62585]: DEBUG nova.compute.manager [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 878.376022] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 878.378986] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931d2a61-5af4-4937-b587-3562b0f98def {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.387118] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384818, 'name': ReconfigVM_Task, 'duration_secs': 0.268947} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.389475] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Reconfigured VM instance instance-00000051 to attach disk [datastore1] f1bfef38-b6d0-40d0-8e60-310f8a75dd78/f1bfef38-b6d0-40d0-8e60-310f8a75dd78.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 878.390266] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 878.390629] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b9deed0-7d45-420e-9149-3c3d1e3594bc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.392164] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b76abeb0-4157-4061-b572-aab5dfcd12f4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.398773] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 878.398773] env[62585]: value = "task-1384820" [ 878.398773] env[62585]: _type = "Task" [ 878.398773] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.400271] env[62585]: DEBUG oslo_vmware.api [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 878.400271] env[62585]: value = "task-1384821" [ 878.400271] env[62585]: _type = "Task" [ 878.400271] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.417018] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384820, 'name': Rename_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.420403] env[62585]: DEBUG oslo_vmware.api [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384821, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.465762] env[62585]: DEBUG nova.network.neutron [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 878.536986] env[62585]: DEBUG nova.scheduler.client.report [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 878.567336] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Skipping network cache update for instance because it is being deleted. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 878.567543] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Skipping network cache update for instance because it is being deleted. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 878.567689] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 878.567822] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Skipping network cache update for instance because it is Building. {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 878.567944] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Skipping network cache update for instance because it is Building. 
{{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 878.624614] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "refresh_cache-8763a058-b453-4f03-9532-7d7e65efdfb2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.625483] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquired lock "refresh_cache-8763a058-b453-4f03-9532-7d7e65efdfb2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.625954] env[62585]: DEBUG nova.network.neutron [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Forcefully refreshing network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 878.625954] env[62585]: DEBUG nova.objects.instance [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lazy-loading 'info_cache' on Instance uuid 8763a058-b453-4f03-9532-7d7e65efdfb2 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 878.656843] env[62585]: DEBUG nova.compute.manager [req-f1d980b2-5554-4bc2-96e3-da8cbbe1e728 req-8d8836e9-0010-4c72-b3c1-2bc017ab2757 service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Received event network-changed-825c7c78-f998-4431-87b7-55f49c79830f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 878.657072] env[62585]: DEBUG nova.compute.manager [req-f1d980b2-5554-4bc2-96e3-da8cbbe1e728 req-8d8836e9-0010-4c72-b3c1-2bc017ab2757 service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Refreshing instance network info cache due to event network-changed-825c7c78-f998-4431-87b7-55f49c79830f. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 878.657294] env[62585]: DEBUG oslo_concurrency.lockutils [req-f1d980b2-5554-4bc2-96e3-da8cbbe1e728 req-8d8836e9-0010-4c72-b3c1-2bc017ab2757 service nova] Acquiring lock "refresh_cache-b2d2a012-a62f-4237-95c3-d7153d6b223c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.657524] env[62585]: DEBUG oslo_concurrency.lockutils [req-f1d980b2-5554-4bc2-96e3-da8cbbe1e728 req-8d8836e9-0010-4c72-b3c1-2bc017ab2757 service nova] Acquired lock "refresh_cache-b2d2a012-a62f-4237-95c3-d7153d6b223c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.657643] env[62585]: DEBUG nova.network.neutron [req-f1d980b2-5554-4bc2-96e3-da8cbbe1e728 req-8d8836e9-0010-4c72-b3c1-2bc017ab2757 service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Refreshing network info cache for port 825c7c78-f998-4431-87b7-55f49c79830f {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 878.735814] env[62585]: DEBUG nova.network.neutron [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Updating instance_info_cache with network_info: [{"id": "b29379d9-a516-40cd-b7f0-35505b917bcb", "address": "fa:16:3e:1c:50:ee", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb29379d9-a5", "ovs_interfaceid": "b29379d9-a516-40cd-b7f0-35505b917bcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.821190] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384819, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.912548] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384820, 'name': Rename_Task, 'duration_secs': 0.148247} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.916139] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 878.916415] env[62585]: DEBUG oslo_vmware.api [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384821, 'name': PowerOffVM_Task, 'duration_secs': 0.182284} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.916617] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6b16794-c5a5-4d58-bf78-dbeea52c804c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.918134] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 878.918324] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 878.918553] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec054044-f2e6-4e46-bcd4-555497b4a849 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.925182] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 878.925182] env[62585]: value = "task-1384822" [ 878.925182] env[62585]: _type = "Task" [ 878.925182] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.935758] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384822, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.002395] env[62585]: DEBUG nova.network.neutron [-] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.003644] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 879.003830] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 879.004016] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleting the datastore file [datastore1] 95de3c81-b764-4594-af86-66df7814d7aa {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.004471] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2fa916df-23b3-477d-a70f-51bc083e2934 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.010733] env[62585]: DEBUG oslo_vmware.api [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 879.010733] env[62585]: value = "task-1384824" [ 879.010733] env[62585]: _type = "Task" [ 879.010733] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.019530] env[62585]: DEBUG oslo_vmware.api [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384824, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.051333] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.051958] env[62585]: DEBUG nova.compute.manager [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 879.054868] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.675s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.058053] env[62585]: DEBUG nova.objects.instance [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lazy-loading 'resources' on Instance uuid 679380d4-5b96-4c30-bac9-f7163f19c609 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 879.238588] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.238911] env[62585]: DEBUG nova.compute.manager [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Instance network_info: |[{"id": "b29379d9-a516-40cd-b7f0-35505b917bcb", "address": "fa:16:3e:1c:50:ee", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb29379d9-a5", "ovs_interfaceid": "b29379d9-a516-40cd-b7f0-35505b917bcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 879.239370] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:50:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f4a795c-8718-4a7c-aafe-9da231df10f8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b29379d9-a516-40cd-b7f0-35505b917bcb', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 879.247465] env[62585]: DEBUG oslo.service.loopingcall [None req-7edac1eb-d646-4208-8508-be407d698b39 
tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 879.249768] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 879.250065] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4e33338-0741-4a41-80bb-1aa02aedcbd9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.270270] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 879.270270] env[62585]: value = "task-1384825" [ 879.270270] env[62585]: _type = "Task" [ 879.270270] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.279219] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384825, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.310360] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "b2d2a012-a62f-4237-95c3-d7153d6b223c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.310975] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "b2d2a012-a62f-4237-95c3-d7153d6b223c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.311746] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "b2d2a012-a62f-4237-95c3-d7153d6b223c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.311746] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "b2d2a012-a62f-4237-95c3-d7153d6b223c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.311850] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "b2d2a012-a62f-4237-95c3-d7153d6b223c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.319232] env[62585]: INFO nova.compute.manager [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Terminating instance [ 879.324193] env[62585]: DEBUG nova.compute.manager [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 879.324450] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 879.325787] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b5d1ef-4e21-4bb0-83d6-679ee657ef30 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.332995] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384819, 'name': CreateSnapshot_Task, 'duration_secs': 0.703978} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.333776] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Created Snapshot of the VM instance {{(pid=62585) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 879.335636] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c0cf1f-3930-4f37-afb3-6bc8aca7ad2e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.342836] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.342836] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f61dc2ba-5aaa-407f-b707-7117204f02f2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.354332] env[62585]: DEBUG oslo_vmware.api [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 879.354332] env[62585]: value = "task-1384826" [ 879.354332] env[62585]: _type = "Task" [ 879.354332] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.357866] env[62585]: DEBUG nova.compute.manager [req-0694a016-4e3a-4520-a5a3-8be746134f58 req-8427f7fa-af7b-4a62-a585-5a51c835a5d1 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Received event network-changed-b29379d9-a516-40cd-b7f0-35505b917bcb {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 879.358108] env[62585]: DEBUG nova.compute.manager [req-0694a016-4e3a-4520-a5a3-8be746134f58 req-8427f7fa-af7b-4a62-a585-5a51c835a5d1 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Refreshing instance network info cache due to event network-changed-b29379d9-a516-40cd-b7f0-35505b917bcb. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 879.358335] env[62585]: DEBUG oslo_concurrency.lockutils [req-0694a016-4e3a-4520-a5a3-8be746134f58 req-8427f7fa-af7b-4a62-a585-5a51c835a5d1 service nova] Acquiring lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.358901] env[62585]: DEBUG oslo_concurrency.lockutils [req-0694a016-4e3a-4520-a5a3-8be746134f58 req-8427f7fa-af7b-4a62-a585-5a51c835a5d1 service nova] Acquired lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.359164] env[62585]: DEBUG nova.network.neutron [req-0694a016-4e3a-4520-a5a3-8be746134f58 req-8427f7fa-af7b-4a62-a585-5a51c835a5d1 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Refreshing network info cache for port b29379d9-a516-40cd-b7f0-35505b917bcb {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 879.370423] env[62585]: DEBUG oslo_vmware.api [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384826, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.435342] env[62585]: DEBUG oslo_vmware.api [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384822, 'name': PowerOnVM_Task, 'duration_secs': 0.474396} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.435697] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 879.435868] env[62585]: INFO nova.compute.manager [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Took 8.57 seconds to spawn the instance on the hypervisor. 
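The task records above show the poll-until-done pattern oslo.vmware uses for vCenter tasks (repeated "progress is 0%" lines followed by "completed successfully" with a duration_secs). The following is a minimal illustrative sketch of such a loop, not the actual oslo.vmware implementation; get_task_info and the shape of its return value are assumptions introduced only for the example.

import time

def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300):
    # Poll a vCenter-style task until it reports success or error, mirroring
    # the periodic "progress is N%" records followed by "completed successfully".
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)      # hypothetical lookup; returns a dict
        state = info.get("state")          # expected: 'running' | 'success' | 'error'
        if state == "success":
            info["duration_secs"] = time.monotonic() - start
            return info
        if state == "error":
            raise RuntimeError(info.get("error", "task %s failed" % task_id))
        if time.monotonic() - start > timeout:
            raise TimeoutError("task %s did not complete in %ss" % (task_id, timeout))
        time.sleep(poll_interval)          # matches the periodic progress polling above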
[ 879.436067] env[62585]: DEBUG nova.compute.manager [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 879.436951] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145d6aad-39f0-4040-95c3-d44dba98854e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.441841] env[62585]: DEBUG nova.network.neutron [req-f1d980b2-5554-4bc2-96e3-da8cbbe1e728 req-8d8836e9-0010-4c72-b3c1-2bc017ab2757 service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Updated VIF entry in instance network info cache for port 825c7c78-f998-4431-87b7-55f49c79830f. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 879.442186] env[62585]: DEBUG nova.network.neutron [req-f1d980b2-5554-4bc2-96e3-da8cbbe1e728 req-8d8836e9-0010-4c72-b3c1-2bc017ab2757 service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Updating instance_info_cache with network_info: [{"id": "825c7c78-f998-4431-87b7-55f49c79830f", "address": "fa:16:3e:a6:7e:37", "network": {"id": "8d1518c7-e8f5-4297-9bb4-b9b4a16a1481", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454559249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c76fd292d84bbe97c7221e75831fbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825c7c78-f9", "ovs_interfaceid": "825c7c78-f998-4431-87b7-55f49c79830f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.504926] env[62585]: INFO nova.compute.manager [-] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Took 1.63 seconds to deallocate network for instance. [ 879.523438] env[62585]: DEBUG oslo_vmware.api [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384824, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237704} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.523664] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 879.523753] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 879.523943] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 879.525549] env[62585]: INFO nova.compute.manager [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Took 1.15 seconds to destroy the instance on the hypervisor. [ 879.525549] env[62585]: DEBUG oslo.service.loopingcall [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 879.525549] env[62585]: DEBUG nova.compute.manager [-] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 879.525549] env[62585]: DEBUG nova.network.neutron [-] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 879.559934] env[62585]: DEBUG nova.compute.utils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 879.564592] env[62585]: DEBUG nova.compute.manager [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 879.564759] env[62585]: DEBUG nova.network.neutron [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 879.624594] env[62585]: DEBUG nova.policy [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01befe1db3684d60943c74da2c2c9fdc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f00751679b29472e9ab92c9e48a99925', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 879.781096] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384825, 'name': CreateVM_Task, 'duration_secs': 0.377376} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.781273] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 879.781947] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.782141] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.782459] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 879.783601] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-735ac9ec-f256-4dc6-98cf-f71635f01a7d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.785835] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25937bd2-765f-4bd0-96d5-b698193c7778 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.791784] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 
tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 879.791784] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52625147-3e10-b954-4c33-762d94d375bf" [ 879.791784] env[62585]: _type = "Task" [ 879.791784] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.796670] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3263e8c-8316-44a1-a085-a0a4211058fa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.805015] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52625147-3e10-b954-4c33-762d94d375bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.830469] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aaa7376-9a78-48a5-8f7a-f5a5b08fee3c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.837824] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2331e3cc-7894-4a34-b5fe-5f4bb964ce18 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.863862] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Creating linked-clone VM from snapshot {{(pid=62585) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 879.864395] env[62585]: DEBUG nova.compute.provider_tree [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.868577] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b9e414d8-974c-46bb-b443-853b58350ff5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.883669] env[62585]: DEBUG oslo_vmware.api [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384826, 'name': PowerOffVM_Task, 'duration_secs': 0.195659} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.884977] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 879.885181] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 879.885481] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 879.885481] env[62585]: value = "task-1384827" [ 879.885481] env[62585]: _type = "Task" [ 879.885481] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.885672] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d2478217-3b5d-4b83-840e-13b29c64bcc7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.895156] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384827, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.946019] env[62585]: DEBUG oslo_concurrency.lockutils [req-f1d980b2-5554-4bc2-96e3-da8cbbe1e728 req-8d8836e9-0010-4c72-b3c1-2bc017ab2757 service nova] Releasing lock "refresh_cache-b2d2a012-a62f-4237-95c3-d7153d6b223c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.954663] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 879.954878] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 879.958861] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Deleting the datastore file [datastore2] b2d2a012-a62f-4237-95c3-d7153d6b223c {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.961794] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-517d800c-e4fd-4ced-8ef1-abdf78d97ddc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.965276] env[62585]: INFO nova.compute.manager [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Took 30.12 seconds to build instance. [ 879.970740] env[62585]: DEBUG oslo_vmware.api [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 879.970740] env[62585]: value = "task-1384829" [ 879.970740] env[62585]: _type = "Task" [ 879.970740] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.980796] env[62585]: DEBUG oslo_vmware.api [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384829, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.011950] env[62585]: DEBUG oslo_concurrency.lockutils [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.064544] env[62585]: DEBUG nova.compute.manager [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 880.070289] env[62585]: DEBUG nova.network.neutron [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Successfully created port: 5606ac00-3eff-44ad-9c80-a4014e4c2724 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 880.268476] env[62585]: DEBUG nova.network.neutron [-] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.271593] env[62585]: DEBUG nova.network.neutron [req-0694a016-4e3a-4520-a5a3-8be746134f58 req-8427f7fa-af7b-4a62-a585-5a51c835a5d1 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Updated VIF entry in instance network info cache for port b29379d9-a516-40cd-b7f0-35505b917bcb. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 880.271990] env[62585]: DEBUG nova.network.neutron [req-0694a016-4e3a-4520-a5a3-8be746134f58 req-8427f7fa-af7b-4a62-a585-5a51c835a5d1 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Updating instance_info_cache with network_info: [{"id": "b29379d9-a516-40cd-b7f0-35505b917bcb", "address": "fa:16:3e:1c:50:ee", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb29379d9-a5", "ovs_interfaceid": "b29379d9-a516-40cd-b7f0-35505b917bcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.309680] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52625147-3e10-b954-4c33-762d94d375bf, 'name': SearchDatastore_Task, 'duration_secs': 0.031178} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.312465] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.312682] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 880.312946] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.313386] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.313625] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 880.313925] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3711813a-01a2-425e-b667-38f758f11f04 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.325336] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 880.325568] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 880.326602] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09a6641f-817f-48ad-8469-b05af8957006 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.332498] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 880.332498] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5256ced0-a378-a531-aefa-fde0c44bd62b" [ 880.332498] env[62585]: _type = "Task" [ 880.332498] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.341058] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5256ced0-a378-a531-aefa-fde0c44bd62b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.368162] env[62585]: DEBUG nova.scheduler.client.report [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 880.397628] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384827, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.470342] env[62585]: DEBUG nova.network.neutron [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Updating instance_info_cache with network_info: [{"id": "597e2cc3-d043-4c6b-a254-2d9838a1ebf9", "address": "fa:16:3e:01:b1:72", "network": {"id": "8d1518c7-e8f5-4297-9bb4-b9b4a16a1481", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454559249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c76fd292d84bbe97c7221e75831fbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap597e2cc3-d0", "ovs_interfaceid": "597e2cc3-d043-4c6b-a254-2d9838a1ebf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.471631] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6565eaa-04c1-41b2-9088-6ac99fadef96 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.630s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.481701] env[62585]: DEBUG oslo_vmware.api [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384829, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288949} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.482563] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.482894] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.483171] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.483452] env[62585]: INFO nova.compute.manager [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Took 1.16 seconds to destroy the instance on the hypervisor. [ 880.483858] env[62585]: DEBUG oslo.service.loopingcall [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.484400] env[62585]: DEBUG nova.compute.manager [-] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 880.484789] env[62585]: DEBUG nova.network.neutron [-] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.682537] env[62585]: DEBUG nova.compute.manager [req-c900a1bc-dfe7-4d60-b4b2-1e0dbe0ec1d5 req-9a773f89-3f1c-464d-b31f-bdd1b686b65d service nova] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Received event network-vif-deleted-3473a7c7-91c3-423f-8e8e-36cd6ba107e2 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 880.771576] env[62585]: INFO nova.compute.manager [-] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Took 1.25 seconds to deallocate network for instance. 
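The "refresh_cache-<uuid>" records above show the per-instance lock that serializes network info cache refreshes (Acquiring / Acquired / Releasing lock around the Neutron query). A minimal sketch of that pattern follows, assuming oslo.concurrency's lockutils.lock context manager, which produces the same Acquiring/Acquired/Releasing DEBUG lines when debug logging is enabled; fetch_nw_info and save_cache are hypothetical stand-ins for the Neutron lookup and the instance_info_cache write, not real Nova APIs.

from oslo_concurrency import lockutils

def refresh_instance_network_cache(instance_uuid, fetch_nw_info, save_cache):
    # Serialize cache refreshes per instance, as the log records above do.
    lock_name = "refresh_cache-%s" % instance_uuid
    with lockutils.lock(lock_name):
        nw_info = fetch_nw_info(instance_uuid)   # e.g. query ports from Neutron
        save_cache(instance_uuid, nw_info)       # persist instance_info_cache
        return nw_info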
[ 880.780627] env[62585]: DEBUG oslo_concurrency.lockutils [req-0694a016-4e3a-4520-a5a3-8be746134f58 req-8427f7fa-af7b-4a62-a585-5a51c835a5d1 service nova] Releasing lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.781039] env[62585]: DEBUG nova.compute.manager [req-0694a016-4e3a-4520-a5a3-8be746134f58 req-8427f7fa-af7b-4a62-a585-5a51c835a5d1 service nova] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Received event network-vif-deleted-3ce4bc74-6a0f-47ca-a5cc-a709b648910d {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 880.844342] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5256ced0-a378-a531-aefa-fde0c44bd62b, 'name': SearchDatastore_Task, 'duration_secs': 0.009049} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.845403] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ae60f46-185a-4ea3-ad29-1fedfa2647dd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.852965] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 880.852965] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b5279c-3fb1-c2f7-4d85-01951604dde3" [ 880.852965] env[62585]: _type = "Task" [ 880.852965] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.861378] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b5279c-3fb1-c2f7-4d85-01951604dde3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.873481] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.819s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.876025] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.202s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.876025] env[62585]: DEBUG oslo_concurrency.lockutils [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.876127] env[62585]: INFO nova.compute.manager [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Successfully reverted task state from image_uploading on failure for instance. [ 880.878385] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.758s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server [None req-2ee0cb45-47f3-451b-9796-f214d9401714 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Exception during message handling: oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.vm.Snapshot:snapshot-294004' has already been deleted or has not been completely created [ 880.881591] env[62585]: Cause: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-294004' has already been deleted or has not been completely created' [ 880.881591] env[62585]: Faults: [ManagedObjectNotFound] [ 880.881591] env[62585]: Details: {'obj': 'snapshot-294004'} [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server response = request(managed_object, **kwargs) [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__ [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server return client.invoke(args, kwargs) [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke [ 880.881591] env[62585]: ERROR 
oslo_messaging.rpc.server result = self.send(soapenv, timeout=timeout) [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server return self.process_reply(reply.message, None, None) [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server raise WebFault(fault, replyroot) [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server suds.WebFault: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-294004' has already been deleted or has not been completely created' [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server return api_method(*args, **kwargs) [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server raise exceptions.VimFaultException(fault_list, fault_string, [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server oslo_vmware.exceptions.VimFaultException: The object 'vim.vm.Snapshot:snapshot-294004' has already been deleted or has not been completely created [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server Cause: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-294004' has already been deleted or has not been completely created' [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server Faults: [ManagedObjectNotFound] [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server Details: {'obj': 'snapshot-294004'} [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 880.881591] env[62585]: 
ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server raise self.value [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server raise self.value [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 880.881591] env[62585]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server raise self.value [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 233, in decorated_function [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server raise self.value [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 230, in decorated_function [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server return function(self, context, image_id, instance, [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4442, in snapshot_instance [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server self._snapshot_instance(context, image_id, instance, [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4475, in _snapshot_instance [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server self.driver.snapshot(context, instance, image_id, [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 571, in snapshot [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server self._vmops.snapshot(context, instance, image_id, update_task_state) [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1033, in snapshot [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server self._delete_vm_snapshot(instance, vm_ref, snapshot_ref) [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/decorator.py", line 232, in fun [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server return caller(func, *(extras + args), **kw) [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 124, in retry_if_task_in_progress [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server f(*args, **kwargs) [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 933, in _delete_vm_snapshot [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server delete_snapshot_task = self._session._call_method( [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 127, in _call_method [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception() as ctxt: [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server raise self.value [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 125, in _call_method [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server return self.invoke_api(module, method, *args, **kwargs) [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server return _invoke_api(module, method, *args, **kwargs) [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
122, in func [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server return evt.wait() [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server idle = self.f(*self.args, **self.kw) [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api [ 880.882997] env[62585]: ERROR oslo_messaging.rpc.server raise clazz(str(excep), [ 880.884487] env[62585]: ERROR oslo_messaging.rpc.server oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.vm.Snapshot:snapshot-294004' has already been deleted or has not been completely created [ 880.884487] env[62585]: ERROR oslo_messaging.rpc.server Cause: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-294004' has already been deleted or has not been completely created' [ 880.884487] env[62585]: ERROR oslo_messaging.rpc.server Faults: [ManagedObjectNotFound] [ 880.884487] env[62585]: ERROR oslo_messaging.rpc.server Details: {'obj': 'snapshot-294004'} [ 880.884487] env[62585]: ERROR oslo_messaging.rpc.server [ 880.897725] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384827, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.899604] env[62585]: INFO nova.scheduler.client.report [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Deleted allocations for instance 679380d4-5b96-4c30-bac9-f7163f19c609 [ 880.972585] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Releasing lock "refresh_cache-8763a058-b453-4f03-9532-7d7e65efdfb2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.972816] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Updated the network info_cache for instance {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 880.973016] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.973180] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.973335] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.973477] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.973622] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.973749] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62585) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 880.973888] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 881.076028] env[62585]: DEBUG nova.compute.manager [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 881.099535] env[62585]: DEBUG nova.virt.hardware [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 881.099822] env[62585]: DEBUG nova.virt.hardware [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 881.100013] env[62585]: DEBUG nova.virt.hardware [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 881.100242] env[62585]: DEBUG nova.virt.hardware [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 881.100406] env[62585]: DEBUG nova.virt.hardware [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 881.100584] env[62585]: DEBUG nova.virt.hardware [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 881.100815] env[62585]: DEBUG nova.virt.hardware [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 881.100994] env[62585]: DEBUG nova.virt.hardware [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 881.101232] env[62585]: DEBUG nova.virt.hardware [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] 
Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 881.101385] env[62585]: DEBUG nova.virt.hardware [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 881.101593] env[62585]: DEBUG nova.virt.hardware [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 881.102531] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9a29d2-2992-4521-9a9d-5d6f5726829f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.111923] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7028a3-2674-45af-be26-a455a0e49dc7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.191843] env[62585]: DEBUG nova.network.neutron [-] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.279170] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.353650] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.353983] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.354227] env[62585]: INFO nova.compute.manager [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Shelving [ 881.370773] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b5279c-3fb1-c2f7-4d85-01951604dde3, 'name': SearchDatastore_Task, 'duration_secs': 0.009684} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.371715] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.371984] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] ddb1103d-a846-4229-b441-de45424b4ec9/ddb1103d-a846-4229-b441-de45424b4ec9.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 881.372539] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9926db08-4aa9-419a-bb2e-4429610b1b07 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.379944] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 881.379944] env[62585]: value = "task-1384830" [ 881.379944] env[62585]: _type = "Task" [ 881.379944] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.383186] env[62585]: INFO nova.compute.claims [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 881.405820] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384827, 'name': CloneVM_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.409184] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b49aa58-277f-4e32-ab4f-4bf44b1330e0 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "679380d4-5b96-4c30-bac9-f7163f19c609" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.368s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.410922] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384830, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.476997] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.694730] env[62585]: INFO nova.compute.manager [-] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Took 1.21 seconds to deallocate network for instance. [ 881.762849] env[62585]: DEBUG nova.compute.manager [req-6d14c377-7ef2-4c77-8b03-538ccbfa5bd9 req-dd6f9523-1167-43d3-a211-eb3bb0c3764b service nova] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Received event network-vif-plugged-5606ac00-3eff-44ad-9c80-a4014e4c2724 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 881.763836] env[62585]: DEBUG oslo_concurrency.lockutils [req-6d14c377-7ef2-4c77-8b03-538ccbfa5bd9 req-dd6f9523-1167-43d3-a211-eb3bb0c3764b service nova] Acquiring lock "891e5a42-3681-47eb-ac88-015fa21a6580-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.763836] env[62585]: DEBUG oslo_concurrency.lockutils [req-6d14c377-7ef2-4c77-8b03-538ccbfa5bd9 req-dd6f9523-1167-43d3-a211-eb3bb0c3764b service nova] Lock "891e5a42-3681-47eb-ac88-015fa21a6580-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.765455] env[62585]: DEBUG oslo_concurrency.lockutils [req-6d14c377-7ef2-4c77-8b03-538ccbfa5bd9 req-dd6f9523-1167-43d3-a211-eb3bb0c3764b service nova] Lock "891e5a42-3681-47eb-ac88-015fa21a6580-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.765455] env[62585]: DEBUG nova.compute.manager [req-6d14c377-7ef2-4c77-8b03-538ccbfa5bd9 req-dd6f9523-1167-43d3-a211-eb3bb0c3764b service nova] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] No waiting events found dispatching network-vif-plugged-5606ac00-3eff-44ad-9c80-a4014e4c2724 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 881.765455] env[62585]: WARNING nova.compute.manager [req-6d14c377-7ef2-4c77-8b03-538ccbfa5bd9 req-dd6f9523-1167-43d3-a211-eb3bb0c3764b service nova] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Received unexpected event network-vif-plugged-5606ac00-3eff-44ad-9c80-a4014e4c2724 for instance with vm_state building and task_state spawning. 
[ 881.867216] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 881.867537] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc59fb0d-9e35-43f0-a6e7-44ef13d40b2f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.876649] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 881.876649] env[62585]: value = "task-1384831" [ 881.876649] env[62585]: _type = "Task" [ 881.876649] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.888632] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384830, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45179} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.891357] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] ddb1103d-a846-4229-b441-de45424b4ec9/ddb1103d-a846-4229-b441-de45424b4ec9.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 881.891674] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 881.893331] env[62585]: INFO nova.compute.resource_tracker [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating resource usage from migration fd35d3e7-abab-46e9-98a6-bb0a3cbf68a6 [ 881.896115] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384831, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.896600] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bacf22d4-9170-4df7-b8a3-9523d8bc95a2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.907303] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384827, 'name': CloneVM_Task, 'duration_secs': 1.585979} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.908555] env[62585]: INFO nova.virt.vmwareapi.vmops [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Created linked-clone VM from snapshot [ 881.908906] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 881.908906] env[62585]: value = "task-1384832" [ 881.908906] env[62585]: _type = "Task" [ 881.908906] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.909821] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10815a9d-5a48-42bb-b862-e53124239479 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.914068] env[62585]: DEBUG nova.network.neutron [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Successfully updated port: 5606ac00-3eff-44ad-9c80-a4014e4c2724 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 881.930606] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384832, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.930825] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Uploading image 5d78a50d-b3a6-4aa7-8847-eb087b11a97e {{(pid=62585) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 881.960588] env[62585]: DEBUG oslo_vmware.rw_handles [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 881.960588] env[62585]: value = "vm-294028" [ 881.960588] env[62585]: _type = "VirtualMachine" [ 881.960588] env[62585]: }. 
{{(pid=62585) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 881.961029] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-80c92a34-8349-4483-b777-32889f9bdba4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.968697] env[62585]: DEBUG oslo_vmware.rw_handles [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lease: (returnval){ [ 881.968697] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5227ba05-a03b-4a9f-d58e-04d172737bba" [ 881.968697] env[62585]: _type = "HttpNfcLease" [ 881.968697] env[62585]: } obtained for exporting VM: (result){ [ 881.968697] env[62585]: value = "vm-294028" [ 881.968697] env[62585]: _type = "VirtualMachine" [ 881.968697] env[62585]: }. {{(pid=62585) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 881.968996] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the lease: (returnval){ [ 881.968996] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5227ba05-a03b-4a9f-d58e-04d172737bba" [ 881.968996] env[62585]: _type = "HttpNfcLease" [ 881.968996] env[62585]: } to be ready. {{(pid=62585) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 881.976355] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 881.976355] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5227ba05-a03b-4a9f-d58e-04d172737bba" [ 881.976355] env[62585]: _type = "HttpNfcLease" [ 881.976355] env[62585]: } is initializing. 
{{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 882.116252] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c885c47-61d0-4680-b88d-9243894ac25d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.122939] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb5d162-fa72-4ff2-8f3e-208732bf98f2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.155509] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a65a12-bbe2-4013-b6d7-9a73e1f943c3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.162934] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c280bb5d-743b-4bd7-a562-76b1f3bf86ee {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.177197] env[62585]: DEBUG nova.compute.provider_tree [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.204566] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.388640] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384831, 'name': PowerOffVM_Task, 'duration_secs': 0.184631} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.389625] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 882.389895] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f7c348-adeb-4d1d-964e-ca2dede31955 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.409252] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c92555-9c8f-4391-ad8a-fbdee2da5579 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.421891] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "refresh_cache-891e5a42-3681-47eb-ac88-015fa21a6580" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.422209] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired lock "refresh_cache-891e5a42-3681-47eb-ac88-015fa21a6580" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.422487] env[62585]: DEBUG nova.network.neutron [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 882.428923] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384832, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07675} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.433126] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 882.435142] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4f9837-cf2f-4a60-b902-454e47fa15fd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.463245] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] ddb1103d-a846-4229-b441-de45424b4ec9/ddb1103d-a846-4229-b441-de45424b4ec9.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 882.463827] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72e58cc1-9247-4263-9976-fb1aadf6016c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.481804] env[62585]: DEBUG nova.network.neutron [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 882.486515] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 882.486515] env[62585]: value = "task-1384834" [ 882.486515] env[62585]: _type = "Task" [ 882.486515] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.488138] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 882.488138] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5227ba05-a03b-4a9f-d58e-04d172737bba" [ 882.488138] env[62585]: _type = "HttpNfcLease" [ 882.488138] env[62585]: } is ready. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 882.491014] env[62585]: DEBUG oslo_vmware.rw_handles [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 882.491014] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5227ba05-a03b-4a9f-d58e-04d172737bba" [ 882.491014] env[62585]: _type = "HttpNfcLease" [ 882.491014] env[62585]: }. 
{{(pid=62585) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 882.491692] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b729438-6a5d-4547-a975-cc380d297c3e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.498682] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384834, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.503840] env[62585]: DEBUG oslo_vmware.rw_handles [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b77ee5-632d-e30b-741c-50c065876832/disk-0.vmdk from lease info. {{(pid=62585) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 882.504041] env[62585]: DEBUG oslo_vmware.rw_handles [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b77ee5-632d-e30b-741c-50c065876832/disk-0.vmdk for reading. {{(pid=62585) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 882.666572] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0a29fe50-bfff-4ed2-8a33-8c5e3ebc0f86 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.680432] env[62585]: DEBUG nova.scheduler.client.report [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 882.734668] env[62585]: DEBUG nova.network.neutron [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Updating instance_info_cache with network_info: [{"id": "5606ac00-3eff-44ad-9c80-a4014e4c2724", "address": "fa:16:3e:ee:ab:3c", "network": {"id": "f73c6c58-29b8-4fb6-a001-94a77e4e6a53", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1579050178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"f00751679b29472e9ab92c9e48a99925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5606ac00-3e", "ovs_interfaceid": "5606ac00-3eff-44ad-9c80-a4014e4c2724", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.765118] env[62585]: DEBUG nova.compute.manager [req-9b9791d1-58ad-4948-92db-edf83916228e req-a483aee8-7b75-44e3-ad32-c33059805c4d service nova] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Received event network-vif-deleted-825c7c78-f998-4431-87b7-55f49c79830f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 882.898874] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "4b080cc3-e1cc-4b64-9926-c37b891444f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.899372] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "4b080cc3-e1cc-4b64-9926-c37b891444f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.930790] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Creating Snapshot of the VM instance {{(pid=62585) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 882.931556] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9968c624-4524-4728-9d1a-45399318faca {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.941302] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 882.941302] env[62585]: value = "task-1384835" [ 882.941302] env[62585]: _type = "Task" [ 882.941302] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.951743] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384835, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.998345] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384834, 'name': ReconfigVM_Task, 'duration_secs': 0.305397} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.998607] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Reconfigured VM instance instance-00000052 to attach disk [datastore2] ddb1103d-a846-4229-b441-de45424b4ec9/ddb1103d-a846-4229-b441-de45424b4ec9.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 882.999163] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9bd70dce-6c75-41c7-9ff6-b66d61fe3032 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.006248] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 883.006248] env[62585]: value = "task-1384836" [ 883.006248] env[62585]: _type = "Task" [ 883.006248] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.014071] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384836, 'name': Rename_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.186910] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.308s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.187206] env[62585]: INFO nova.compute.manager [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Migrating [ 883.187511] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.187702] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.189227] env[62585]: DEBUG oslo_concurrency.lockutils [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.178s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.189522] env[62585]: DEBUG nova.objects.instance [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lazy-loading 'resources' on Instance uuid 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 883.200720] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "2cf85b78-df04-40d0-a7db-5e8979574d0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.201098] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "2cf85b78-df04-40d0-a7db-5e8979574d0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.237711] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Releasing lock "refresh_cache-891e5a42-3681-47eb-ac88-015fa21a6580" {{(pid=62585) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.238094] env[62585]: DEBUG nova.compute.manager [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Instance network_info: |[{"id": "5606ac00-3eff-44ad-9c80-a4014e4c2724", "address": "fa:16:3e:ee:ab:3c", "network": {"id": "f73c6c58-29b8-4fb6-a001-94a77e4e6a53", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1579050178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f00751679b29472e9ab92c9e48a99925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5606ac00-3e", "ovs_interfaceid": "5606ac00-3eff-44ad-9c80-a4014e4c2724", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 883.238886] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:ab:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5606ac00-3eff-44ad-9c80-a4014e4c2724', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.247071] env[62585]: DEBUG oslo.service.loopingcall [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.247902] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 883.248899] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91adfd96-aba1-4085-a9f4-7f1771baceae {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.276150] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.276150] env[62585]: value = "task-1384837" [ 883.276150] env[62585]: _type = "Task" [ 883.276150] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.287622] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384837, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.403945] env[62585]: DEBUG nova.compute.manager [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 883.452719] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384835, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.486007] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "fcbbc06c-71fa-4891-8bfc-0de746b9e622" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.486495] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "fcbbc06c-71fa-4891-8bfc-0de746b9e622" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.517451] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384836, 'name': Rename_Task} progress is 99%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.691492] env[62585]: INFO nova.compute.rpcapi [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 883.692162] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.709239] env[62585]: DEBUG nova.compute.manager [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 883.790221] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384837, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.793354] env[62585]: DEBUG nova.compute.manager [req-72cf3b00-7b96-4093-ad76-cb5d99fa058f req-b226e85f-9a45-4397-afd1-f158592becac service nova] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Received event network-changed-5606ac00-3eff-44ad-9c80-a4014e4c2724 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 883.793470] env[62585]: DEBUG nova.compute.manager [req-72cf3b00-7b96-4093-ad76-cb5d99fa058f req-b226e85f-9a45-4397-afd1-f158592becac service nova] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Refreshing instance network info cache due to event network-changed-5606ac00-3eff-44ad-9c80-a4014e4c2724. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 883.793742] env[62585]: DEBUG oslo_concurrency.lockutils [req-72cf3b00-7b96-4093-ad76-cb5d99fa058f req-b226e85f-9a45-4397-afd1-f158592becac service nova] Acquiring lock "refresh_cache-891e5a42-3681-47eb-ac88-015fa21a6580" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.793898] env[62585]: DEBUG oslo_concurrency.lockutils [req-72cf3b00-7b96-4093-ad76-cb5d99fa058f req-b226e85f-9a45-4397-afd1-f158592becac service nova] Acquired lock "refresh_cache-891e5a42-3681-47eb-ac88-015fa21a6580" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.794088] env[62585]: DEBUG nova.network.neutron [req-72cf3b00-7b96-4093-ad76-cb5d99fa058f req-b226e85f-9a45-4397-afd1-f158592becac service nova] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Refreshing network info cache for port 5606ac00-3eff-44ad-9c80-a4014e4c2724 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 883.928124] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.953030] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384835, 'name': CreateSnapshot_Task, 'duration_secs': 0.696899} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.954460] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Created Snapshot of the VM instance {{(pid=62585) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 883.955326] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ffe3365-8721-431c-8586-d9a3223bba46 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.958314] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7a6841-56b6-452b-bb92-b5b8d5888854 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.973218] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8ac6be-4d7e-4d87-b5ce-f1b53c6411ae {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.005617] env[62585]: DEBUG nova.compute.manager [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 884.010141] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174886c1-feed-4373-bb52-a978a1189d1f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.026130] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384836, 'name': Rename_Task} progress is 99%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.029168] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67bb18f6-4787-4844-a0f4-5efad76c6a7b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.044428] env[62585]: DEBUG nova.compute.provider_tree [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.212780] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "refresh_cache-d96a04d7-b07f-439d-aafa-09dc70a4d1a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.212983] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock "refresh_cache-d96a04d7-b07f-439d-aafa-09dc70a4d1a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.213290] env[62585]: DEBUG nova.network.neutron [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 884.230026] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.292191] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384837, 'name': CreateVM_Task} progress is 25%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.479587] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Creating linked-clone VM from snapshot {{(pid=62585) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 884.480067] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-963ba39d-e69f-427d-8ae6-e060805c74eb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.491272] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 884.491272] env[62585]: value = "task-1384838" [ 884.491272] env[62585]: _type = "Task" [ 884.491272] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.502052] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384838, 'name': CloneVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.525678] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384836, 'name': Rename_Task, 'duration_secs': 1.150765} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.528146] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 884.528430] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03325cb4-dcde-4af5-8a47-7e42e1eff850 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.531417] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.538900] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 884.538900] env[62585]: value = "task-1384839" [ 884.538900] env[62585]: _type = "Task" [ 884.538900] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.551558] env[62585]: DEBUG nova.scheduler.client.report [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 884.554976] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384839, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.595106] env[62585]: DEBUG nova.network.neutron [req-72cf3b00-7b96-4093-ad76-cb5d99fa058f req-b226e85f-9a45-4397-afd1-f158592becac service nova] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Updated VIF entry in instance network info cache for port 5606ac00-3eff-44ad-9c80-a4014e4c2724. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 884.595512] env[62585]: DEBUG nova.network.neutron [req-72cf3b00-7b96-4093-ad76-cb5d99fa058f req-b226e85f-9a45-4397-afd1-f158592becac service nova] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Updating instance_info_cache with network_info: [{"id": "5606ac00-3eff-44ad-9c80-a4014e4c2724", "address": "fa:16:3e:ee:ab:3c", "network": {"id": "f73c6c58-29b8-4fb6-a001-94a77e4e6a53", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1579050178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f00751679b29472e9ab92c9e48a99925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5606ac00-3e", "ovs_interfaceid": "5606ac00-3eff-44ad-9c80-a4014e4c2724", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.791565] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384837, 'name': CreateVM_Task, 'duration_secs': 1.208227} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.794299] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 884.795225] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.795545] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.796126] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 884.796746] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46e1f389-de54-41dd-ad55-ab5e11ba6b6b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.803278] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 884.803278] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52508092-801f-034e-1b50-3e4bb2ad11c8" [ 884.803278] env[62585]: _type = "Task" [ 884.803278] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.814744] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52508092-801f-034e-1b50-3e4bb2ad11c8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.997729] env[62585]: DEBUG nova.network.neutron [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating instance_info_cache with network_info: [{"id": "c92c2d1e-3117-42a1-a5d2-3de9eba6e107", "address": "fa:16:3e:35:20:3f", "network": {"id": "8c3bc3f6-1bf0-436b-b7d4-cf0757610bb8", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972774874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19b8936eaf754cbcbd1b099846a3146d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc92c2d1e-31", "ovs_interfaceid": "c92c2d1e-3117-42a1-a5d2-3de9eba6e107", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.003507] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384838, 'name': CloneVM_Task} progress is 94%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.050958] env[62585]: DEBUG oslo_vmware.api [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384839, 'name': PowerOnVM_Task, 'duration_secs': 0.494304} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.051276] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 885.051484] env[62585]: INFO nova.compute.manager [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Took 8.21 seconds to spawn the instance on the hypervisor. 
[ 885.051738] env[62585]: DEBUG nova.compute.manager [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 885.052564] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b482fd-3dbd-46d3-a981-2468396b84af {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.056873] env[62585]: DEBUG oslo_concurrency.lockutils [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.868s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.060412] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.781s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.060681] env[62585]: DEBUG nova.objects.instance [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lazy-loading 'resources' on Instance uuid 95de3c81-b764-4594-af86-66df7814d7aa {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 885.076278] env[62585]: INFO nova.scheduler.client.report [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Deleted allocations for instance 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f [ 885.098627] env[62585]: DEBUG oslo_concurrency.lockutils [req-72cf3b00-7b96-4093-ad76-cb5d99fa058f req-b226e85f-9a45-4397-afd1-f158592becac service nova] Releasing lock "refresh_cache-891e5a42-3681-47eb-ac88-015fa21a6580" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.315138] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52508092-801f-034e-1b50-3e4bb2ad11c8, 'name': SearchDatastore_Task, 'duration_secs': 0.013642} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.315312] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.315557] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.315824] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.315974] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.316174] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 885.316464] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5efb0d6-c6c5-4d7c-bb77-116075a5deb3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.326189] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 885.326375] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 885.327367] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aad3f75a-edf6-4e42-9647-7aca09d1195d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.333383] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 885.333383] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c970f9-f64f-2178-9c27-6a16982e8539" [ 885.333383] env[62585]: _type = "Task" [ 885.333383] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.342058] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c970f9-f64f-2178-9c27-6a16982e8539, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.502780] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384838, 'name': CloneVM_Task} progress is 95%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.504800] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "refresh_cache-d96a04d7-b07f-439d-aafa-09dc70a4d1a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.574988] env[62585]: INFO nova.compute.manager [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Took 20.16 seconds to build instance. 
[ 885.588826] env[62585]: DEBUG oslo_concurrency.lockutils [None req-72fddc0f-9c96-4f5b-8aac-c0ba73230548 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "62e3b57b-6c9c-4f3c-8a47-efb5fbed801f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.409s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.790754] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5c74b6-d0e0-443a-b5f0-241b54eddd88 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.800631] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d3733e-23d3-4ea2-9700-0c8dfcbf9fcf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.839730] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0466e33-dbf9-4cbf-b606-17dc507b07d5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.850151] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c970f9-f64f-2178-9c27-6a16982e8539, 'name': SearchDatastore_Task, 'duration_secs': 0.009846} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.853950] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf422352-f5ed-4613-8a2e-12b977d269cf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.857401] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25d6eea-55ec-408d-b3b2-5d46b6689932 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.874911] env[62585]: DEBUG nova.compute.provider_tree [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.876931] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 885.876931] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52496d1c-ca89-cdf8-48be-de2c63c6029a" [ 885.876931] env[62585]: _type = "Task" [ 885.876931] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.888300] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52496d1c-ca89-cdf8-48be-de2c63c6029a, 'name': SearchDatastore_Task, 'duration_secs': 0.012256} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.888608] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.888883] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 891e5a42-3681-47eb-ac88-015fa21a6580/891e5a42-3681-47eb-ac88-015fa21a6580.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 885.889261] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe9a2655-3663-4402-bcab-d2ffd272e6d8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.897947] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 885.897947] env[62585]: value = "task-1384840" [ 885.897947] env[62585]: _type = "Task" [ 885.897947] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.907429] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384840, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.004204] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384838, 'name': CloneVM_Task, 'duration_secs': 1.312409} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.004550] env[62585]: INFO nova.virt.vmwareapi.vmops [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Created linked-clone VM from snapshot [ 886.005388] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a04d1a8-83a8-494b-8d0e-4768e024a891 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.018344] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Uploading image b48cf53a-b7aa-4959-b3ba-d006efd72e8a {{(pid=62585) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 886.056430] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 886.056430] env[62585]: value = "vm-294031" [ 886.056430] env[62585]: _type = "VirtualMachine" [ 886.056430] env[62585]: }. {{(pid=62585) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 886.058839] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b18032cd-a6ec-470a-85c9-387076c6bb72 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.065819] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lease: (returnval){ [ 886.065819] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52445fe1-2b99-cbcd-3302-7a856834581d" [ 886.065819] env[62585]: _type = "HttpNfcLease" [ 886.065819] env[62585]: } obtained for exporting VM: (result){ [ 886.065819] env[62585]: value = "vm-294031" [ 886.065819] env[62585]: _type = "VirtualMachine" [ 886.065819] env[62585]: }. {{(pid=62585) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 886.066310] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the lease: (returnval){ [ 886.066310] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52445fe1-2b99-cbcd-3302-7a856834581d" [ 886.066310] env[62585]: _type = "HttpNfcLease" [ 886.066310] env[62585]: } to be ready. {{(pid=62585) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 886.075890] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 886.075890] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52445fe1-2b99-cbcd-3302-7a856834581d" [ 886.075890] env[62585]: _type = "HttpNfcLease" [ 886.075890] env[62585]: } is initializing. 
{{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 886.077508] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7edac1eb-d646-4208-8508-be407d698b39 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "ddb1103d-a846-4229-b441-de45424b4ec9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.675s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.378365] env[62585]: DEBUG nova.scheduler.client.report [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 886.413257] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384840, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.578079] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 886.578079] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52445fe1-2b99-cbcd-3302-7a856834581d" [ 886.578079] env[62585]: _type = "HttpNfcLease" [ 886.578079] env[62585]: } is ready. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 886.578079] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 886.578079] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52445fe1-2b99-cbcd-3302-7a856834581d" [ 886.578079] env[62585]: _type = "HttpNfcLease" [ 886.578079] env[62585]: }. {{(pid=62585) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 886.581286] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2c32ec-a35d-47c2-8d54-13f41e47da09 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.597686] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529f9d17-94b4-4f38-b370-3f48b7bb585d/disk-0.vmdk from lease info. 
{{(pid=62585) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 886.597868] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529f9d17-94b4-4f38-b370-3f48b7bb585d/disk-0.vmdk for reading. {{(pid=62585) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 886.700485] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d7e0cba0-18ee-41c4-98e7-fca6d27ab971 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.884291] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.824s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.887816] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.410s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.887816] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.887816] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62585) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 886.887816] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.683s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.888259] env[62585]: DEBUG nova.objects.instance [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lazy-loading 'resources' on Instance uuid b2d2a012-a62f-4237-95c3-d7153d6b223c {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.889724] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82465f78-d3e8-45ae-b97e-3ce6df676a2b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.910045] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-356ae58d-0d0c-4a1d-b3ec-921160893560 {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.917752] env[62585]: INFO nova.scheduler.client.report [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleted allocations for instance 95de3c81-b764-4594-af86-66df7814d7aa [ 886.938297] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384840, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.732666} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.939299] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18290852-481b-4f1d-9de7-914549e89ec3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.942288] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 891e5a42-3681-47eb-ac88-015fa21a6580/891e5a42-3681-47eb-ac88-015fa21a6580.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 886.943392] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 886.943392] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a92b1d6-b807-42f7-ad1a-d758bc3d7445 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.952458] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c3f17d-4690-4653-924d-5470ba135e01 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.956598] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 886.956598] env[62585]: value = "task-1384842" [ 886.956598] env[62585]: _type = "Task" [ 886.956598] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.996129] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181033MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=62585) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 886.996350] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.003091] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384842, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.021476] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99e5392-7ea4-46ae-84b7-370856e3effb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.869360] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating instance 'd96a04d7-b07f-439d-aafa-09dc70a4d1a7' progress to 0 {{(pid=62585) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 887.874237] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7f007ef5-3ef9-45d0-b2cc-07e66e9037ea tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "95de3c81-b764-4594-af86-66df7814d7aa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.505s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.880139] env[62585]: DEBUG nova.compute.manager [req-97b67453-036d-4dcc-8a0a-3cda1545977b req-d9e9b88f-c7e1-456b-9e91-999f9d50548e service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Received event network-changed-840822b3-e947-451f-90bf-03eafebebf95 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 887.880832] env[62585]: DEBUG nova.compute.manager [req-97b67453-036d-4dcc-8a0a-3cda1545977b req-d9e9b88f-c7e1-456b-9e91-999f9d50548e service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Refreshing instance network info cache due to event network-changed-840822b3-e947-451f-90bf-03eafebebf95. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 887.880832] env[62585]: DEBUG oslo_concurrency.lockutils [req-97b67453-036d-4dcc-8a0a-3cda1545977b req-d9e9b88f-c7e1-456b-9e91-999f9d50548e service nova] Acquiring lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.880987] env[62585]: DEBUG oslo_concurrency.lockutils [req-97b67453-036d-4dcc-8a0a-3cda1545977b req-d9e9b88f-c7e1-456b-9e91-999f9d50548e service nova] Acquired lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.882177] env[62585]: DEBUG nova.network.neutron [req-97b67453-036d-4dcc-8a0a-3cda1545977b req-d9e9b88f-c7e1-456b-9e91-999f9d50548e service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Refreshing network info cache for port 840822b3-e947-451f-90bf-03eafebebf95 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 887.891450] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384842, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076308} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.891746] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.892778] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef81a89-5ed2-4a28-8581-01157156cd82 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.918611] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] 891e5a42-3681-47eb-ac88-015fa21a6580/891e5a42-3681-47eb-ac88-015fa21a6580.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.920031] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2a82110-bfd5-4f7c-b290-ad366096256a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.946592] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 887.946592] env[62585]: value = "task-1384843" [ 887.946592] env[62585]: _type = "Task" [ 887.946592] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.957404] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384843, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.157184] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b86b147-ae18-47e1-8666-9f076ad70f41 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.165802] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4f5c89-1799-4012-939d-be4bc086cf2c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.201747] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fa0b2a-52a1-4daf-b2b5-e6f5e4f7248e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.211176] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb6e52f-1c53-469f-9e32-25dd8baf1d37 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.227424] env[62585]: DEBUG nova.compute.provider_tree [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.389217] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 888.389591] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79c62d6b-e9c6-4a29-8e6c-cfb37d2c21ee {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.398424] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 888.398424] env[62585]: value = "task-1384844" [ 888.398424] env[62585]: _type = "Task" [ 888.398424] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.409121] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384844, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.458722] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384843, 'name': ReconfigVM_Task, 'duration_secs': 0.491488} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.459036] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Reconfigured VM instance instance-00000053 to attach disk [datastore2] 891e5a42-3681-47eb-ac88-015fa21a6580/891e5a42-3681-47eb-ac88-015fa21a6580.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.459909] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-860453fa-d6ac-4aee-853e-c8c16bbb82e9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.468197] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 888.468197] env[62585]: value = "task-1384845" [ 888.468197] env[62585]: _type = "Task" [ 888.468197] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.478661] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384845, 'name': Rename_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.733024] env[62585]: DEBUG nova.scheduler.client.report [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 888.909660] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384844, 'name': PowerOffVM_Task, 'duration_secs': 0.311601} completed successfully. 
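
The "Waiting for the task ... to complete", "progress is N%" and "completed successfully" entries above are the oslo.vmware task-polling pattern: Nova submits a vCenter task (ReconfigVM_Task, PowerOffVM_Task, Rename_Task, ...) and polls it until it leaves the running state. A minimal illustrative sketch of that loop, using a hypothetical stub session and task-info object rather than the real oslo_vmware.api.VMwareAPISession:

import time

class _StubTaskInfo:
    def __init__(self, state, progress=0, error=None):
        self.state = state          # 'running' | 'success' | 'error'
        self.progress = progress
        self.error = error

class _StubSession:
    """Hypothetical stand-in for a vSphere session: two 'running' polls, then success."""
    def __init__(self):
        self._polls = 0
    def get_task_info(self, task_ref):
        self._polls += 1
        if self._polls < 3:
            return _StubTaskInfo('running', progress=self._polls * 30)
        return _StubTaskInfo('success', progress=100)

def wait_for_task(session, task_ref, poll_interval=0.5):
    # Poll until the task finishes, mirroring the "Waiting for the task ...
    # progress is N% ... completed successfully" lines above.
    while True:
        info = session.get_task_info(task_ref)
        if info.state == 'running':
            print(f"Task {task_ref}: progress is {info.progress}%")
            time.sleep(poll_interval)
            continue
        if info.state == 'success':
            print(f"Task {task_ref}: completed successfully")
            return info
        raise RuntimeError(f"Task {task_ref} failed: {info.error}")

wait_for_task(_StubSession(), "task-1384843", poll_interval=0.05)
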
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.910076] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 888.910433] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating instance 'd96a04d7-b07f-439d-aafa-09dc70a4d1a7' progress to 17 {{(pid=62585) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 888.983441] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384845, 'name': Rename_Task, 'duration_secs': 0.213514} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.983736] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.984016] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-733749f7-4eb2-4ab7-96de-e1d0536370ea {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.995055] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 888.995055] env[62585]: value = "task-1384846" [ 888.995055] env[62585]: _type = "Task" [ 888.995055] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.037062] env[62585]: DEBUG nova.network.neutron [req-97b67453-036d-4dcc-8a0a-3cda1545977b req-d9e9b88f-c7e1-456b-9e91-999f9d50548e service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updated VIF entry in instance network info cache for port 840822b3-e947-451f-90bf-03eafebebf95. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 889.037210] env[62585]: DEBUG nova.network.neutron [req-97b67453-036d-4dcc-8a0a-3cda1545977b req-d9e9b88f-c7e1-456b-9e91-999f9d50548e service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updating instance_info_cache with network_info: [{"id": "840822b3-e947-451f-90bf-03eafebebf95", "address": "fa:16:3e:f8:2a:80", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap840822b3-e9", "ovs_interfaceid": "840822b3-e947-451f-90bf-03eafebebf95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.211849] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384846, 'name': PowerOnVM_Task} progress is 0%. 
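
The instance_info_cache payload above is the serialized VIF list Nova keeps per instance; each entry carries the Neutron port ID, MAC address, subnets with fixed and floating IPs, and the OVS binding details. A small plain-Python sketch that pulls the addresses out of a trimmed-down copy of that entry (the JSON layout below is assumed to match the cache entry shown above; only the fields used here are kept):

import json

network_info_json = '''
[{"id": "840822b3-e947-451f-90bf-03eafebebf95",
  "address": "fa:16:3e:f8:2a:80",
  "network": {"subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.9",
                                    "floating_ips": [{"address": "10.180.180.160"}]}]}]},
  "devname": "tap840822b3-e9"}]
'''

for vif in json.loads(network_info_json):
    fixed = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]
    floating = [fip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                for fip in ip.get("floating_ips", [])]
    print(vif["id"], vif["devname"], "fixed:", fixed, "floating:", floating)
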
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.236979] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.349s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.240354] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.313s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.242310] env[62585]: INFO nova.compute.claims [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 889.286315] env[62585]: INFO nova.scheduler.client.report [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Deleted allocations for instance b2d2a012-a62f-4237-95c3-d7153d6b223c [ 889.418162] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 889.418474] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 889.418570] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 889.418749] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 889.418894] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 
tempest-ServerDiskConfigTestJSON-792514113-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 889.419063] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 889.419279] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 889.419494] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 889.419745] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 889.419948] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 889.420147] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 889.425259] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d18d64f7-e556-4d92-8c3c-385e227356ab {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.444436] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 889.444436] env[62585]: value = "task-1384847" [ 889.444436] env[62585]: _type = "Task" [ 889.444436] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.455681] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384847, 'name': ReconfigVM_Task} progress is 5%. 
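
The hardware.py lines above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") show Nova enumerating sockets/cores/threads factorizations of the flavor's vCPU count under the 65536 per-dimension limits and then sorting them by preference. A rough sketch of that enumeration step (not Nova's actual _get_possible_cpu_topologies, just the idea, with a simplified VirtCPUTopology tuple):

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Every (sockets, cores, threads) triple whose product equals the vCPU
    # count and that fits inside the per-dimension limits.
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        rest = vcpus // sockets
        for cores in range(1, min(rest, max_cores) + 1):
            if rest % cores:
                continue
            threads = rest // cores
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found

print(possible_topologies(1))   # one topology for the 1-vCPU flavors above
print(possible_topologies(4))   # several factorizations for a 4-vCPU flavor
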
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.507409] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384846, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.512475] env[62585]: DEBUG nova.compute.manager [req-fd71c24a-8e16-4884-9228-34c1d1e9d8d3 req-a1e4f673-1142-4760-addd-e4a863891411 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Received event network-changed-b29379d9-a516-40cd-b7f0-35505b917bcb {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 889.512721] env[62585]: DEBUG nova.compute.manager [req-fd71c24a-8e16-4884-9228-34c1d1e9d8d3 req-a1e4f673-1142-4760-addd-e4a863891411 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Refreshing instance network info cache due to event network-changed-b29379d9-a516-40cd-b7f0-35505b917bcb. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 889.512990] env[62585]: DEBUG oslo_concurrency.lockutils [req-fd71c24a-8e16-4884-9228-34c1d1e9d8d3 req-a1e4f673-1142-4760-addd-e4a863891411 service nova] Acquiring lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.513165] env[62585]: DEBUG oslo_concurrency.lockutils [req-fd71c24a-8e16-4884-9228-34c1d1e9d8d3 req-a1e4f673-1142-4760-addd-e4a863891411 service nova] Acquired lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.513594] env[62585]: DEBUG nova.network.neutron [req-fd71c24a-8e16-4884-9228-34c1d1e9d8d3 req-a1e4f673-1142-4760-addd-e4a863891411 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Refreshing network info cache for port b29379d9-a516-40cd-b7f0-35505b917bcb {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 889.530719] env[62585]: DEBUG nova.compute.manager [req-50428336-7e05-45ec-a12b-b186e0a76aba req-d73c11f2-37d2-4c26-9370-c249ad63ea6e service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Received event network-changed-b29379d9-a516-40cd-b7f0-35505b917bcb {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 889.530927] env[62585]: DEBUG nova.compute.manager [req-50428336-7e05-45ec-a12b-b186e0a76aba req-d73c11f2-37d2-4c26-9370-c249ad63ea6e service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Refreshing instance network info cache due to event network-changed-b29379d9-a516-40cd-b7f0-35505b917bcb. 
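
The "Received event network-changed-<port>" entries are Neutron notifications delivered to nova-compute; the handler takes the per-instance refresh_cache lock, re-reads the port from Neutron, and rewrites the instance_info_cache (the "Updated VIF entry" lines that follow). A simplified sketch of that flow with stubbed Neutron and cache objects (the real code paths are nova.compute.manager.external_instance_event and nova.network.neutron; StubNeutronClient and the module-level dicts here are hypothetical):

import threading

_cache_locks = {}   # per-instance "refresh_cache-<uuid>" locks
_info_cache = {}    # instance uuid -> {port_id: port details}

class StubNeutronClient:
    """Hypothetical stand-in for the Neutron API client."""
    def show_port(self, port_id):
        return {"id": port_id, "status": "ACTIVE",
                "fixed_ips": [{"ip_address": "192.168.128.5"}]}

def handle_network_changed(instance_uuid, port_id, neutron=None):
    neutron = neutron or StubNeutronClient()
    lock = _cache_locks.setdefault(instance_uuid, threading.Lock())
    with lock:   # "Acquired lock refresh_cache-<uuid>"
        port = neutron.show_port(port_id)
        _info_cache.setdefault(instance_uuid, {})[port_id] = port
        print(f"Updated VIF entry in instance network info cache for port {port_id}")

handle_network_changed("ddb1103d-a846-4229-b441-de45424b4ec9",
                       "b29379d9-a516-40cd-b7f0-35505b917bcb")
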
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 889.531194] env[62585]: DEBUG oslo_concurrency.lockutils [req-50428336-7e05-45ec-a12b-b186e0a76aba req-d73c11f2-37d2-4c26-9370-c249ad63ea6e service nova] Acquiring lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.540271] env[62585]: DEBUG oslo_concurrency.lockutils [req-97b67453-036d-4dcc-8a0a-3cda1545977b req-d9e9b88f-c7e1-456b-9e91-999f9d50548e service nova] Releasing lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.796459] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aaafd0fa-3cf5-4e76-8b8e-9d70b9797f7e tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "b2d2a012-a62f-4237-95c3-d7153d6b223c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.485s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.958190] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384847, 'name': ReconfigVM_Task, 'duration_secs': 0.268669} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.958619] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating instance 'd96a04d7-b07f-439d-aafa-09dc70a4d1a7' progress to 33 {{(pid=62585) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 890.007473] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384846, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.347581] env[62585]: DEBUG nova.network.neutron [req-fd71c24a-8e16-4884-9228-34c1d1e9d8d3 req-a1e4f673-1142-4760-addd-e4a863891411 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Updated VIF entry in instance network info cache for port b29379d9-a516-40cd-b7f0-35505b917bcb. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 890.347942] env[62585]: DEBUG nova.network.neutron [req-fd71c24a-8e16-4884-9228-34c1d1e9d8d3 req-a1e4f673-1142-4760-addd-e4a863891411 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Updating instance_info_cache with network_info: [{"id": "b29379d9-a516-40cd-b7f0-35505b917bcb", "address": "fa:16:3e:1c:50:ee", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb29379d9-a5", "ovs_interfaceid": "b29379d9-a516-40cd-b7f0-35505b917bcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.451691] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557e374b-6d4c-4af2-b299-bbae93a90260 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.460034] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fffbea6-8536-4be4-a801-989c1559dd1b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.465740] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 890.466090] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 890.466191] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 
tempest-ServerDiskConfigTestJSON-792514113-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 890.466394] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 890.466567] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 890.466725] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 890.466986] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 890.467263] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 890.467452] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 890.467642] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 890.467800] env[62585]: DEBUG nova.virt.hardware [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 890.473247] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Reconfiguring VM instance instance-0000004e to detach disk 2000 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 890.498317] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48300c1f-cad6-4947-926b-44da08ba15da {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.516070] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a517d26e-dfb0-45ea-8fa8-b4b64cc013b3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.527297] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384846, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.529628] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09202cf2-1df1-4164-956e-fbc696a40c2d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.533702] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 890.533702] env[62585]: value = "task-1384848" [ 890.533702] env[62585]: _type = "Task" [ 890.533702] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.545732] env[62585]: DEBUG nova.compute.provider_tree [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.554379] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384848, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.850551] env[62585]: DEBUG oslo_concurrency.lockutils [req-fd71c24a-8e16-4884-9228-34c1d1e9d8d3 req-a1e4f673-1142-4760-addd-e4a863891411 service nova] Releasing lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.851067] env[62585]: DEBUG oslo_concurrency.lockutils [req-50428336-7e05-45ec-a12b-b186e0a76aba req-d73c11f2-37d2-4c26-9370-c249ad63ea6e service nova] Acquired lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.851302] env[62585]: DEBUG nova.network.neutron [req-50428336-7e05-45ec-a12b-b186e0a76aba req-d73c11f2-37d2-4c26-9370-c249ad63ea6e service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Refreshing network info cache for port b29379d9-a516-40cd-b7f0-35505b917bcb {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 891.023117] env[62585]: DEBUG oslo_vmware.api [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384846, 'name': PowerOnVM_Task, 'duration_secs': 1.624447} completed successfully. 
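
The "Inventory has not changed in ProviderTree for provider ..." lines reflect the resource tracker comparing the locally computed inventory against what it last reported to Placement and skipping the update when the dicts match. A minimal sketch of that comparison (illustrative only; the real logic lives in nova.compute.provider_tree and the scheduler report client, and placement_put here is a hypothetical callback standing in for the Placement API call):

_reported = {}   # provider uuid -> last inventory sent to Placement

def update_inventory(provider_uuid, inventory, placement_put=print):
    # Only push to Placement when something actually changed.
    if _reported.get(provider_uuid) == inventory:
        print(f"Inventory has not changed for provider {provider_uuid}")
        return False
    placement_put(f"PUT /resource_providers/{provider_uuid}/inventories {inventory}")
    _reported[provider_uuid] = inventory
    return True

inv = {"VCPU": {"total": 48, "allocation_ratio": 4.0},
       "MEMORY_MB": {"total": 196590, "reserved": 512},
       "DISK_GB": {"total": 400}}
update_inventory("66db9ec1-b5c3-45d2-a885-8e338110656b", inv)   # first call: reported
update_inventory("66db9ec1-b5c3-45d2-a885-8e338110656b", inv)   # second call: unchanged
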
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.023117] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 891.023331] env[62585]: INFO nova.compute.manager [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Took 9.95 seconds to spawn the instance on the hypervisor. [ 891.023434] env[62585]: DEBUG nova.compute.manager [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 891.024342] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5329741e-f2b9-4023-a343-f26e104cbc59 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.043596] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384848, 'name': ReconfigVM_Task, 'duration_secs': 0.258319} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.043920] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Reconfigured VM instance instance-0000004e to detach disk 2000 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 891.044741] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b61074-07a5-4148-b0db-e6765e7b8d5e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.049665] env[62585]: DEBUG nova.scheduler.client.report [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 891.070631] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 
d96a04d7-b07f-439d-aafa-09dc70a4d1a7/d96a04d7-b07f-439d-aafa-09dc70a4d1a7.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 891.072183] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc3fa0f4-3187-440a-8fd3-9fd6fb9dc88f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.108116] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 891.108116] env[62585]: value = "task-1384849" [ 891.108116] env[62585]: _type = "Task" [ 891.108116] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.117641] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384849, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.222060] env[62585]: DEBUG oslo_vmware.rw_handles [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b77ee5-632d-e30b-741c-50c065876832/disk-0.vmdk. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 891.223106] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d0dddb-76ee-42fa-91bd-67fb8da95151 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.232202] env[62585]: DEBUG oslo_vmware.rw_handles [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b77ee5-632d-e30b-741c-50c065876832/disk-0.vmdk is in state: ready. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 891.232408] env[62585]: ERROR oslo_vmware.rw_handles [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b77ee5-632d-e30b-741c-50c065876832/disk-0.vmdk due to incomplete transfer. [ 891.232672] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ef647bdd-469e-40be-9d74-831518a19a91 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.240880] env[62585]: DEBUG oslo_vmware.rw_handles [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b77ee5-632d-e30b-741c-50c065876832/disk-0.vmdk. 
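
The rw_handles entries above show the end of a VMDK export over an HttpNfcLease: on close, the read handle checks the lease state and, because the transfer never reached 100% of the declared size, aborts the lease before closing. The ERROR line can appear even though the Glance upload itself succeeds, as the "Uploaded image ... to the Glance image server" entry that follows shows. A rough sketch of that close path with a stubbed lease object (illustrative; the real implementation is oslo_vmware.rw_handles, and StubLease/VmdkReadHandle here are hypothetical):

class StubLease:
    """Hypothetical HttpNfcLease stand-in: 'ready' until completed or aborted."""
    def __init__(self):
        self.state = "ready"
    def complete(self):
        self.state = "done"
    def abort(self):
        self.state = "aborted"

class VmdkReadHandle:
    def __init__(self, lease, expected_bytes):
        self._lease = lease
        self._expected = expected_bytes
        self._transferred = 0
    def read(self, chunk):
        self._transferred += chunk
        return b"\0" * chunk
    def close(self):
        # Mirror the entries above: complete the lease only if everything was
        # read, otherwise abort it before closing the handle.
        if self._transferred >= self._expected:
            self._lease.complete()
        else:
            print("Aborting lease due to incomplete transfer.")
            self._lease.abort()
        print("Closed VMDK read handle.")

handle = VmdkReadHandle(StubLease(), expected_bytes=1024)
handle.read(512)   # short read, e.g. a sparse streamOptimized export
handle.close()     # -> abort + close, as in the entries above
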
{{(pid=62585) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 891.241128] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Uploaded image 5d78a50d-b3a6-4aa7-8847-eb087b11a97e to the Glance image server {{(pid=62585) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 891.243801] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Destroying the VM {{(pid=62585) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 891.244110] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6f20dfa8-6724-41ac-a03d-62f3600a4c8e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.252418] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 891.252418] env[62585]: value = "task-1384850" [ 891.252418] env[62585]: _type = "Task" [ 891.252418] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.261935] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384850, 'name': Destroy_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.548865] env[62585]: INFO nova.compute.manager [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Took 24.01 seconds to build instance. [ 891.572920] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.333s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.576627] env[62585]: DEBUG nova.compute.manager [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 891.577989] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.347s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.579772] env[62585]: INFO nova.compute.claims [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 891.624240] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384849, 'name': ReconfigVM_Task, 'duration_secs': 0.509971} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.625475] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Reconfigured VM instance instance-0000004e to attach disk [datastore2] d96a04d7-b07f-439d-aafa-09dc70a4d1a7/d96a04d7-b07f-439d-aafa-09dc70a4d1a7.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.625802] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating instance 'd96a04d7-b07f-439d-aafa-09dc70a4d1a7' progress to 50 {{(pid=62585) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 891.717496] env[62585]: DEBUG nova.network.neutron [req-50428336-7e05-45ec-a12b-b186e0a76aba req-d73c11f2-37d2-4c26-9370-c249ad63ea6e service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Updated VIF entry in instance network info cache for port b29379d9-a516-40cd-b7f0-35505b917bcb. 
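
The compute_resources lock lines ("acquired ... waited 7.347s", "released ... held 2.333s") show the resource tracker serializing claims on the host: a build waits for the lock, claims vCPU/RAM/disk against the node ("Claim successful on node ..."), and releases the lock before the slower network and spawn work continues outside it. A threading-based sketch of that waited/held accounting plus a trivial claim check (not the oslo.concurrency or ResourceTracker implementation; the free-resource numbers are taken from the inventory lines above):

import threading, time
from contextlib import contextmanager

compute_resources = threading.Lock()
free = {"VCPU": 48, "MEMORY_MB": 196590 - 512, "DISK_GB": 400}

@contextmanager
def timed_lock(lock, name):
    t0 = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired :: waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released :: held {time.monotonic() - t1:.3f}s')

def instance_claim(flavor):
    with timed_lock(compute_resources, "compute_resources"):
        if any(free[r] < flavor[r] for r in flavor):
            raise RuntimeError("insufficient resources")
        for r in flavor:
            free[r] -= flavor[r]
        print("Claim successful on node")

instance_claim({"VCPU": 1, "MEMORY_MB": 192, "DISK_GB": 1})   # m1.nano-sized claim
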
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 891.717943] env[62585]: DEBUG nova.network.neutron [req-50428336-7e05-45ec-a12b-b186e0a76aba req-d73c11f2-37d2-4c26-9370-c249ad63ea6e service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Updating instance_info_cache with network_info: [{"id": "b29379d9-a516-40cd-b7f0-35505b917bcb", "address": "fa:16:3e:1c:50:ee", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb29379d9-a5", "ovs_interfaceid": "b29379d9-a516-40cd-b7f0-35505b917bcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.752544] env[62585]: DEBUG nova.compute.manager [req-6d58ef4c-5618-4d78-98a6-6744252c7c1a req-6188c0d9-bc4e-492b-84c6-c6d92245193e service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Received event network-changed-840822b3-e947-451f-90bf-03eafebebf95 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 891.753322] env[62585]: DEBUG nova.compute.manager [req-6d58ef4c-5618-4d78-98a6-6744252c7c1a req-6188c0d9-bc4e-492b-84c6-c6d92245193e service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Refreshing instance network info cache due to event network-changed-840822b3-e947-451f-90bf-03eafebebf95. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 891.753322] env[62585]: DEBUG oslo_concurrency.lockutils [req-6d58ef4c-5618-4d78-98a6-6744252c7c1a req-6188c0d9-bc4e-492b-84c6-c6d92245193e service nova] Acquiring lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.753469] env[62585]: DEBUG oslo_concurrency.lockutils [req-6d58ef4c-5618-4d78-98a6-6744252c7c1a req-6188c0d9-bc4e-492b-84c6-c6d92245193e service nova] Acquired lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.753594] env[62585]: DEBUG nova.network.neutron [req-6d58ef4c-5618-4d78-98a6-6744252c7c1a req-6188c0d9-bc4e-492b-84c6-c6d92245193e service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Refreshing network info cache for port 840822b3-e947-451f-90bf-03eafebebf95 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 891.766229] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384850, 'name': Destroy_Task} progress is 33%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.051010] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fa63fbde-a061-4413-9d5d-7ce96e3e3a1d tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "891e5a42-3681-47eb-ac88-015fa21a6580" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.524s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.092875] env[62585]: DEBUG nova.compute.utils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 892.098444] env[62585]: DEBUG nova.compute.manager [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 892.098444] env[62585]: DEBUG nova.network.neutron [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 892.122875] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "01941b61-1960-4360-9dd0-513d5597bc70" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.123092] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "01941b61-1960-4360-9dd0-513d5597bc70" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.133194] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e7d098-78d8-4c37-9617-9e5b5fe2b64b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.156559] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6293c4a-df95-4548-91dd-478d3ce036b6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.175945] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating instance 'd96a04d7-b07f-439d-aafa-09dc70a4d1a7' progress to 67 {{(pid=62585) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 892.191186] env[62585]: DEBUG nova.policy [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02348f13253c4a43bc09e7d255bb2b23', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbafceca6afd477e8afa38df5790b585', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 892.221360] env[62585]: DEBUG oslo_concurrency.lockutils [req-50428336-7e05-45ec-a12b-b186e0a76aba req-d73c11f2-37d2-4c26-9370-c249ad63ea6e service nova] Releasing lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.271278] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 
tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384850, 'name': Destroy_Task, 'duration_secs': 0.551049} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.271597] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Destroyed the VM [ 892.271968] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Deleting Snapshot of the VM instance {{(pid=62585) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 892.272247] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d6b6309f-fd7a-4f52-bcd1-7ba31723fec5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.281712] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 892.281712] env[62585]: value = "task-1384851" [ 892.281712] env[62585]: _type = "Task" [ 892.281712] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.292899] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384851, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.539180] env[62585]: DEBUG nova.network.neutron [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Successfully created port: 144b0acd-f385-4e1c-b8cc-8396bf7f7648 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 892.600918] env[62585]: DEBUG nova.compute.manager [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 892.626416] env[62585]: DEBUG nova.compute.manager [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 892.645764] env[62585]: DEBUG nova.network.neutron [req-6d58ef4c-5618-4d78-98a6-6744252c7c1a req-6188c0d9-bc4e-492b-84c6-c6d92245193e service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updated VIF entry in instance network info cache for port 840822b3-e947-451f-90bf-03eafebebf95. 
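
The "Policy check for network:attach_external_network failed with credentials {...}" entry a few records back is a routine oslo.policy denial for a non-admin project; the port (144b0acd-...) is then created on the tenant network as usual. A toy role-based check in the same spirit (not the oslo_policy rule engine; POLICIES and the rule-to-roles mapping here are hypothetical stand-ins for policy.yaml):

POLICIES = {
    # rule name -> roles allowed (stand-in for the real policy rules)
    "network:attach_external_network": {"admin"},
}

def authorize(rule, credentials):
    allowed = bool(POLICIES.get(rule, set()) & set(credentials.get("roles", [])))
    if not allowed:
        print(f"Policy check for {rule} failed with credentials {credentials}")
    return allowed

creds = {"project_id": "dbafceca6afd477e8afa38df5790b585", "roles": ["reader", "member"]}
if not authorize("network:attach_external_network", creds):
    # fall back to requesting ports on the tenant network only
    pass
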
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 892.645764] env[62585]: DEBUG nova.network.neutron [req-6d58ef4c-5618-4d78-98a6-6744252c7c1a req-6188c0d9-bc4e-492b-84c6-c6d92245193e service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updating instance_info_cache with network_info: [{"id": "840822b3-e947-451f-90bf-03eafebebf95", "address": "fa:16:3e:f8:2a:80", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap840822b3-e9", "ovs_interfaceid": "840822b3-e947-451f-90bf-03eafebebf95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.743589] env[62585]: DEBUG nova.network.neutron [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Port c92c2d1e-3117-42a1-a5d2-3de9eba6e107 binding to destination host cpu-1 is already ACTIVE {{(pid=62585) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 892.796045] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384851, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.841725] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17cdc627-c05a-41b0-ba36-7d886f3de103 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.856526] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34cecdfa-ad38-47c0-9e83-88c5e6a80861 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.897342] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f41b7377-b838-4d44-8d03-8ba7e0fde97e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.906285] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e04cb6a-d2b8-4d06-9b75-5067ff6f6eff {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.922993] env[62585]: DEBUG nova.compute.provider_tree [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.952770] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "8763a058-b453-4f03-9532-7d7e65efdfb2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.953075] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "8763a058-b453-4f03-9532-7d7e65efdfb2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.953296] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "8763a058-b453-4f03-9532-7d7e65efdfb2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.953489] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "8763a058-b453-4f03-9532-7d7e65efdfb2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.953663] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 
tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "8763a058-b453-4f03-9532-7d7e65efdfb2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.956164] env[62585]: INFO nova.compute.manager [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Terminating instance [ 892.958500] env[62585]: DEBUG nova.compute.manager [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 892.958692] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 892.959747] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8260beb6-1791-47f2-b6dd-03f33017e87b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.969360] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 892.969770] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a179ba4-8ec4-41e4-bf8a-cee7387c39a5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.977887] env[62585]: DEBUG oslo_vmware.api [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 892.977887] env[62585]: value = "task-1384852" [ 892.977887] env[62585]: _type = "Task" [ 892.977887] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.988996] env[62585]: DEBUG oslo_vmware.api [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384852, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.019147] env[62585]: DEBUG nova.compute.manager [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 893.019220] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010f06b1-1a92-4a9f-b7b1-3a4696bf09a0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.149456] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.150154] env[62585]: DEBUG oslo_concurrency.lockutils [req-6d58ef4c-5618-4d78-98a6-6744252c7c1a req-6188c0d9-bc4e-492b-84c6-c6d92245193e service nova] Releasing lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.296327] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384851, 'name': RemoveSnapshot_Task, 'duration_secs': 0.711296} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.296672] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Deleted Snapshot of the VM instance {{(pid=62585) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 893.297041] env[62585]: DEBUG nova.compute.manager [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 893.297994] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d42058-6eda-4d8a-993c-c092c61a014c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.426681] env[62585]: DEBUG nova.scheduler.client.report [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 893.489394] env[62585]: DEBUG oslo_vmware.api [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384852, 'name': PowerOffVM_Task, 'duration_secs': 0.221575} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.489722] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 893.489900] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 893.490188] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf61f83b-b81b-4c0b-8edf-e1ee59479de6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.533870] env[62585]: INFO nova.compute.manager [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] instance snapshotting [ 893.538583] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26184d5a-87d6-42a3-b7ae-59519e35a663 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.558512] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53fb2df6-479e-41cc-9cc5-2c4d87ec8c49 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.564973] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 893.565388] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 893.565714] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Deleting the datastore file [datastore1] 8763a058-b453-4f03-9532-7d7e65efdfb2 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.568368] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-506801a1-7639-444f-8758-a5928a3ee368 {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.578799] env[62585]: DEBUG oslo_vmware.api [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for the task: (returnval){ [ 893.578799] env[62585]: value = "task-1384854" [ 893.578799] env[62585]: _type = "Task" [ 893.578799] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.587736] env[62585]: DEBUG oslo_vmware.api [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384854, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.614748] env[62585]: DEBUG nova.compute.manager [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 893.645207] env[62585]: DEBUG nova.virt.hardware [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 893.645488] env[62585]: DEBUG nova.virt.hardware [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 893.645650] env[62585]: DEBUG nova.virt.hardware [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 893.645840] env[62585]: DEBUG nova.virt.hardware [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 893.645996] env[62585]: DEBUG nova.virt.hardware [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Image pref 0:0:0 {{(pid=62585) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 893.646160] env[62585]: DEBUG nova.virt.hardware [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 893.646376] env[62585]: DEBUG nova.virt.hardware [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 893.646540] env[62585]: DEBUG nova.virt.hardware [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 893.646739] env[62585]: DEBUG nova.virt.hardware [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 893.646882] env[62585]: DEBUG nova.virt.hardware [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 893.647074] env[62585]: DEBUG nova.virt.hardware [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 893.647928] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70587036-7150-4317-be2e-36f8e188832b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.656866] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea25ad6b-c6ac-4966-9ddc-e2f8a9717762 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.769576] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.769842] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.770060] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.814166] env[62585]: INFO nova.compute.manager [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Shelve offloading [ 893.815993] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 893.816286] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fce403e-0ed4-4aa6-abbb-df457b620fd7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.824882] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 893.824882] env[62585]: value = "task-1384855" [ 893.824882] env[62585]: _type = "Task" [ 893.824882] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.834481] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] VM already powered off {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 893.834707] env[62585]: DEBUG nova.compute.manager [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 893.835519] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218b5583-41d0-4593-9a8d-9cbfbf10f4ba {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.843319] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.843510] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquired lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.843714] env[62585]: DEBUG nova.network.neutron [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 893.933283] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.357s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.935609] env[62585]: DEBUG nova.compute.manager [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 893.936842] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.405s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.938608] env[62585]: INFO nova.compute.claims [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 893.987170] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529f9d17-94b4-4f38-b370-3f48b7bb585d/disk-0.vmdk. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 893.988310] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7afbc750-a08e-42e8-8a19-f17ea66e99c9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.995529] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529f9d17-94b4-4f38-b370-3f48b7bb585d/disk-0.vmdk is in state: ready. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 893.995711] env[62585]: ERROR oslo_vmware.rw_handles [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529f9d17-94b4-4f38-b370-3f48b7bb585d/disk-0.vmdk due to incomplete transfer. [ 893.995944] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c5ce3e73-bfc8-4688-8218-9ff1850bfd34 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.004620] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529f9d17-94b4-4f38-b370-3f48b7bb585d/disk-0.vmdk. 
{{(pid=62585) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 894.004805] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Uploaded image b48cf53a-b7aa-4959-b3ba-d006efd72e8a to the Glance image server {{(pid=62585) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 894.006608] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Destroying the VM {{(pid=62585) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 894.006865] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a1f0db92-0d36-4e2d-9329-cb3b7514afc1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.013733] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 894.013733] env[62585]: value = "task-1384856" [ 894.013733] env[62585]: _type = "Task" [ 894.013733] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.025400] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384856, 'name': Destroy_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.075092] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Creating Snapshot of the VM instance {{(pid=62585) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 894.075463] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ac770372-3b3b-44c0-9245-b125347c1132 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.084801] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 894.084801] env[62585]: value = "task-1384857" [ 894.084801] env[62585]: _type = "Task" [ 894.084801] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.090995] env[62585]: DEBUG oslo_vmware.api [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Task: {'id': task-1384854, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.30212} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.091638] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 894.091874] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 894.092066] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 894.092245] env[62585]: INFO nova.compute.manager [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Took 1.13 seconds to destroy the instance on the hypervisor. [ 894.092489] env[62585]: DEBUG oslo.service.loopingcall [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 894.092713] env[62585]: DEBUG nova.compute.manager [-] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 894.092814] env[62585]: DEBUG nova.network.neutron [-] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 894.098212] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384857, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.269073] env[62585]: DEBUG nova.compute.manager [req-6fe6dfac-9d36-4600-ac3d-f2bffa5ae423 req-e0da66fb-25e9-4445-985b-533678c1e119 service nova] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Received event network-vif-plugged-144b0acd-f385-4e1c-b8cc-8396bf7f7648 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 894.269227] env[62585]: DEBUG oslo_concurrency.lockutils [req-6fe6dfac-9d36-4600-ac3d-f2bffa5ae423 req-e0da66fb-25e9-4445-985b-533678c1e119 service nova] Acquiring lock "4b080cc3-e1cc-4b64-9926-c37b891444f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.269556] env[62585]: DEBUG oslo_concurrency.lockutils [req-6fe6dfac-9d36-4600-ac3d-f2bffa5ae423 req-e0da66fb-25e9-4445-985b-533678c1e119 service nova] Lock "4b080cc3-e1cc-4b64-9926-c37b891444f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.269773] env[62585]: DEBUG oslo_concurrency.lockutils [req-6fe6dfac-9d36-4600-ac3d-f2bffa5ae423 req-e0da66fb-25e9-4445-985b-533678c1e119 service nova] Lock "4b080cc3-e1cc-4b64-9926-c37b891444f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.270037] env[62585]: DEBUG nova.compute.manager [req-6fe6dfac-9d36-4600-ac3d-f2bffa5ae423 req-e0da66fb-25e9-4445-985b-533678c1e119 service nova] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] No waiting events found dispatching network-vif-plugged-144b0acd-f385-4e1c-b8cc-8396bf7f7648 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 894.270237] env[62585]: WARNING nova.compute.manager [req-6fe6dfac-9d36-4600-ac3d-f2bffa5ae423 req-e0da66fb-25e9-4445-985b-533678c1e119 service nova] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Received unexpected event network-vif-plugged-144b0acd-f385-4e1c-b8cc-8396bf7f7648 for instance with vm_state building and task_state spawning. [ 894.332981] env[62585]: DEBUG nova.network.neutron [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Successfully updated port: 144b0acd-f385-4e1c-b8cc-8396bf7f7648 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 894.442262] env[62585]: DEBUG nova.compute.utils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 894.445746] env[62585]: DEBUG nova.compute.manager [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 894.446636] env[62585]: DEBUG nova.network.neutron [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 894.519953] env[62585]: DEBUG nova.policy [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02348f13253c4a43bc09e7d255bb2b23', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbafceca6afd477e8afa38df5790b585', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 894.527978] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384856, 'name': Destroy_Task} progress is 33%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.582989] env[62585]: DEBUG nova.compute.manager [req-6984eded-17cd-4297-ae10-32f462c8c41e req-ed041a7f-99fc-4549-b65c-b48686fd6d01 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Received event network-vif-deleted-597e2cc3-d043-4c6b-a254-2d9838a1ebf9 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 894.583218] env[62585]: INFO nova.compute.manager [req-6984eded-17cd-4297-ae10-32f462c8c41e req-ed041a7f-99fc-4549-b65c-b48686fd6d01 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Neutron deleted interface 597e2cc3-d043-4c6b-a254-2d9838a1ebf9; detaching it from the instance and deleting it from the info cache [ 894.583400] env[62585]: DEBUG nova.network.neutron [req-6984eded-17cd-4297-ae10-32f462c8c41e req-ed041a7f-99fc-4549-b65c-b48686fd6d01 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.600318] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384857, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.673650] env[62585]: DEBUG nova.network.neutron [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updating instance_info_cache with network_info: [{"id": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "address": "fa:16:3e:31:3b:9c", "network": {"id": "8a8daef6-7b2d-44f6-8f2a-5cdf4dfff449", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-240024676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9645866ca8f0433cae30cf5867244ca8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc3d19ab-ba", "ovs_interfaceid": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.827776] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "refresh_cache-d96a04d7-b07f-439d-aafa-09dc70a4d1a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.827964] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock "refresh_cache-d96a04d7-b07f-439d-aafa-09dc70a4d1a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.828158] env[62585]: DEBUG nova.network.neutron [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 894.837134] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "refresh_cache-4b080cc3-e1cc-4b64-9926-c37b891444f5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.837134] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquired lock 
"refresh_cache-4b080cc3-e1cc-4b64-9926-c37b891444f5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.837134] env[62585]: DEBUG nova.network.neutron [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 894.844158] env[62585]: DEBUG nova.network.neutron [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Successfully created port: 57d68249-852c-488a-ac40-8de77dec7712 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 894.932816] env[62585]: DEBUG nova.network.neutron [-] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.952368] env[62585]: DEBUG nova.compute.manager [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 895.035020] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384856, 'name': Destroy_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.089652] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a001574-869d-4305-99a5-56adc25d1a05 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.101115] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384857, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.105282] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775ceeb1-d865-4e6b-b811-cd3be745cc1a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.137594] env[62585]: DEBUG nova.compute.manager [req-6984eded-17cd-4297-ae10-32f462c8c41e req-ed041a7f-99fc-4549-b65c-b48686fd6d01 service nova] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Detach interface failed, port_id=597e2cc3-d043-4c6b-a254-2d9838a1ebf9, reason: Instance 8763a058-b453-4f03-9532-7d7e65efdfb2 could not be found. 
{{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 895.177329] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Releasing lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.198540] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ffcbd4-efb1-4476-a363-64d0e5d744f5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.208020] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9235795b-67a9-45fc-8ea2-c2d5678d581d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.235637] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad025fc2-6e4c-4f7a-ad99-553b7c1becf5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.243617] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30404ea-aeb1-4a77-b603-664b6bee3496 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.257058] env[62585]: DEBUG nova.compute.provider_tree [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.395910] env[62585]: DEBUG nova.network.neutron [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 895.435388] env[62585]: INFO nova.compute.manager [-] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Took 1.34 seconds to deallocate network for instance. [ 895.528428] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384856, 'name': Destroy_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.601667] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384857, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.617639] env[62585]: DEBUG nova.network.neutron [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating instance_info_cache with network_info: [{"id": "c92c2d1e-3117-42a1-a5d2-3de9eba6e107", "address": "fa:16:3e:35:20:3f", "network": {"id": "8c3bc3f6-1bf0-436b-b7d4-cf0757610bb8", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972774874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19b8936eaf754cbcbd1b099846a3146d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc92c2d1e-31", "ovs_interfaceid": "c92c2d1e-3117-42a1-a5d2-3de9eba6e107", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.703972] env[62585]: DEBUG nova.network.neutron [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Updating instance_info_cache with network_info: [{"id": "144b0acd-f385-4e1c-b8cc-8396bf7f7648", "address": "fa:16:3e:57:e9:e4", "network": {"id": "7ae3ebe7-9a7f-4139-a4c5-a66a0b7f0d27", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1276079323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbafceca6afd477e8afa38df5790b585", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap144b0acd-f3", "ovs_interfaceid": "144b0acd-f385-4e1c-b8cc-8396bf7f7648", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.760040] env[62585]: DEBUG nova.scheduler.client.report [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based 
on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 895.815121] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 895.816284] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c10ec6-a950-4477-8aff-465b497c1833 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.825471] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 895.825745] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd21553f-7d6b-4225-802c-56741c3ff447 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.915743] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 895.916353] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 895.916655] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Deleting the datastore file [datastore1] 6057e13b-71df-458d-b6ed-c139a8c57836 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.917036] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca7e7025-ee84-4dbd-8428-2e75ba7e56b6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.924750] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 895.924750] env[62585]: value = "task-1384859" [ 895.924750] env[62585]: _type = "Task" [ 895.924750] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.934607] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384859, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.942791] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.964184] env[62585]: DEBUG nova.compute.manager [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 895.986611] env[62585]: DEBUG nova.virt.hardware [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 895.986901] env[62585]: DEBUG nova.virt.hardware [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 895.987078] env[62585]: DEBUG nova.virt.hardware [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 895.987295] env[62585]: DEBUG nova.virt.hardware [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 895.987428] env[62585]: DEBUG nova.virt.hardware [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Image pref 0:0:0 {{(pid=62585) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.987578] env[62585]: DEBUG nova.virt.hardware [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 895.987784] env[62585]: DEBUG nova.virt.hardware [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 895.987945] env[62585]: DEBUG nova.virt.hardware [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 895.988127] env[62585]: DEBUG nova.virt.hardware [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 895.988367] env[62585]: DEBUG nova.virt.hardware [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 895.988458] env[62585]: DEBUG nova.virt.hardware [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 895.989324] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7fb5d8-6278-4195-ae83-660164756978 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.997903] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdaf3525-bdf7-43bd-869d-a33283859462 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.027820] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384856, 'name': Destroy_Task, 'duration_secs': 1.592207} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.028100] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Destroyed the VM [ 896.028343] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Deleting Snapshot of the VM instance {{(pid=62585) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 896.028594] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7dfa2803-181d-480f-9bc5-84918ca92d61 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.036981] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 896.036981] env[62585]: value = "task-1384860" [ 896.036981] env[62585]: _type = "Task" [ 896.036981] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.046097] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384860, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.098051] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384857, 'name': CreateSnapshot_Task, 'duration_secs': 1.62651} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.098418] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Created Snapshot of the VM instance {{(pid=62585) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 896.099220] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c02b6be8-df58-4043-b6d3-56e341864b3e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.121793] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "refresh_cache-d96a04d7-b07f-439d-aafa-09dc70a4d1a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.207671] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Releasing lock "refresh_cache-4b080cc3-e1cc-4b64-9926-c37b891444f5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.208053] env[62585]: DEBUG nova.compute.manager [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Instance network_info: |[{"id": "144b0acd-f385-4e1c-b8cc-8396bf7f7648", "address": "fa:16:3e:57:e9:e4", "network": {"id": "7ae3ebe7-9a7f-4139-a4c5-a66a0b7f0d27", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1276079323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbafceca6afd477e8afa38df5790b585", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap144b0acd-f3", "ovs_interfaceid": "144b0acd-f385-4e1c-b8cc-8396bf7f7648", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 896.208572] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:e9:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '144b0acd-f385-4e1c-b8cc-8396bf7f7648', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 896.216108] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Creating folder: Project (dbafceca6afd477e8afa38df5790b585). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 896.216363] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5bfe8fd5-c3bd-4568-92f0-7b1e05c2ed28 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.230105] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Created folder: Project (dbafceca6afd477e8afa38df5790b585) in parent group-v293962. [ 896.230316] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Creating folder: Instances. Parent ref: group-v294033. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 896.230568] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca3c20d0-f9c4-488f-a0ff-e48c0daf995d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.243113] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Created folder: Instances in parent group-v294033. [ 896.243388] env[62585]: DEBUG oslo.service.loopingcall [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 896.243616] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 896.243933] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-372aa778-3290-4196-9eed-48b6b671be63 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.265315] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.328s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.265996] env[62585]: DEBUG nova.compute.manager [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 896.270364] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 9.274s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.272214] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 896.272214] env[62585]: value = "task-1384863" [ 896.272214] env[62585]: _type = "Task" [ 896.272214] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.284017] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384863, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.340136] env[62585]: DEBUG nova.compute.manager [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Received event network-changed-144b0acd-f385-4e1c-b8cc-8396bf7f7648 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 896.340435] env[62585]: DEBUG nova.compute.manager [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Refreshing instance network info cache due to event network-changed-144b0acd-f385-4e1c-b8cc-8396bf7f7648. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 896.340857] env[62585]: DEBUG oslo_concurrency.lockutils [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] Acquiring lock "refresh_cache-4b080cc3-e1cc-4b64-9926-c37b891444f5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.341206] env[62585]: DEBUG oslo_concurrency.lockutils [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] Acquired lock "refresh_cache-4b080cc3-e1cc-4b64-9926-c37b891444f5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.342192] env[62585]: DEBUG nova.network.neutron [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Refreshing network info cache for port 144b0acd-f385-4e1c-b8cc-8396bf7f7648 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 896.438262] env[62585]: DEBUG oslo_vmware.api [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384859, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160895} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.438793] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 896.439375] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 896.439375] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 896.464485] env[62585]: INFO nova.scheduler.client.report [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Deleted allocations for instance 6057e13b-71df-458d-b6ed-c139a8c57836 [ 896.526512] env[62585]: DEBUG nova.network.neutron [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Successfully updated port: 57d68249-852c-488a-ac40-8de77dec7712 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 896.548799] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384860, 'name': RemoveSnapshot_Task, 'duration_secs': 0.448115} completed 
successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.548799] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Deleted Snapshot of the VM instance {{(pid=62585) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 896.549770] env[62585]: DEBUG nova.compute.manager [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 896.550285] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e615ff-cfe5-4855-b005-bada2834a4f7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.618520] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Creating linked-clone VM from snapshot {{(pid=62585) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 896.619653] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e81217aa-c91d-4ce8-b241-2baee832e83b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.629473] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 896.629473] env[62585]: value = "task-1384864" [ 896.629473] env[62585]: _type = "Task" [ 896.629473] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.640559] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384864, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.642695] env[62585]: DEBUG nova.compute.manager [req-40fe21a8-62e4-499f-a0b9-6ac64711a8e1 req-c6bd1f39-4571-41e6-930c-8a658ed2f6a6 service nova] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Received event network-vif-plugged-57d68249-852c-488a-ac40-8de77dec7712 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 896.642907] env[62585]: DEBUG oslo_concurrency.lockutils [req-40fe21a8-62e4-499f-a0b9-6ac64711a8e1 req-c6bd1f39-4571-41e6-930c-8a658ed2f6a6 service nova] Acquiring lock "2cf85b78-df04-40d0-a7db-5e8979574d0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.643301] env[62585]: DEBUG oslo_concurrency.lockutils [req-40fe21a8-62e4-499f-a0b9-6ac64711a8e1 req-c6bd1f39-4571-41e6-930c-8a658ed2f6a6 service nova] Lock "2cf85b78-df04-40d0-a7db-5e8979574d0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.643417] env[62585]: DEBUG oslo_concurrency.lockutils [req-40fe21a8-62e4-499f-a0b9-6ac64711a8e1 req-c6bd1f39-4571-41e6-930c-8a658ed2f6a6 service nova] Lock "2cf85b78-df04-40d0-a7db-5e8979574d0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.643536] env[62585]: DEBUG nova.compute.manager [req-40fe21a8-62e4-499f-a0b9-6ac64711a8e1 req-c6bd1f39-4571-41e6-930c-8a658ed2f6a6 service nova] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] No waiting events found dispatching network-vif-plugged-57d68249-852c-488a-ac40-8de77dec7712 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 896.643737] env[62585]: WARNING nova.compute.manager [req-40fe21a8-62e4-499f-a0b9-6ac64711a8e1 req-c6bd1f39-4571-41e6-930c-8a658ed2f6a6 service nova] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Received unexpected event network-vif-plugged-57d68249-852c-488a-ac40-8de77dec7712 for instance with vm_state building and task_state spawning. [ 896.643909] env[62585]: DEBUG nova.compute.manager [req-40fe21a8-62e4-499f-a0b9-6ac64711a8e1 req-c6bd1f39-4571-41e6-930c-8a658ed2f6a6 service nova] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Received event network-changed-57d68249-852c-488a-ac40-8de77dec7712 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 896.644087] env[62585]: DEBUG nova.compute.manager [req-40fe21a8-62e4-499f-a0b9-6ac64711a8e1 req-c6bd1f39-4571-41e6-930c-8a658ed2f6a6 service nova] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Refreshing instance network info cache due to event network-changed-57d68249-852c-488a-ac40-8de77dec7712. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 896.644276] env[62585]: DEBUG oslo_concurrency.lockutils [req-40fe21a8-62e4-499f-a0b9-6ac64711a8e1 req-c6bd1f39-4571-41e6-930c-8a658ed2f6a6 service nova] Acquiring lock "refresh_cache-2cf85b78-df04-40d0-a7db-5e8979574d0a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.644412] env[62585]: DEBUG oslo_concurrency.lockutils [req-40fe21a8-62e4-499f-a0b9-6ac64711a8e1 req-c6bd1f39-4571-41e6-930c-8a658ed2f6a6 service nova] Acquired lock "refresh_cache-2cf85b78-df04-40d0-a7db-5e8979574d0a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.644568] env[62585]: DEBUG nova.network.neutron [req-40fe21a8-62e4-499f-a0b9-6ac64711a8e1 req-c6bd1f39-4571-41e6-930c-8a658ed2f6a6 service nova] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Refreshing network info cache for port 57d68249-852c-488a-ac40-8de77dec7712 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 896.647291] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d55aa53c-4707-4a05-bee1-738bd5bf9738 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.671338] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1558ed-2fc7-41ef-b0de-8246dbc8ded8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.679799] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating instance 'd96a04d7-b07f-439d-aafa-09dc70a4d1a7' progress to 83 {{(pid=62585) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 896.779775] env[62585]: DEBUG nova.compute.utils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 896.781374] env[62585]: DEBUG nova.compute.manager [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 896.781565] env[62585]: DEBUG nova.network.neutron [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 896.794647] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384863, 'name': CreateVM_Task, 'duration_secs': 0.502507} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.796025] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 896.796604] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.796846] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.797376] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 896.798033] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7233eb0-8c5a-456b-b6df-74dd82be929b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.803264] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 896.803264] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a432bb-291f-77b9-03b1-7731b62013c8" [ 896.803264] env[62585]: _type = "Task" [ 896.803264] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.812863] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a432bb-291f-77b9-03b1-7731b62013c8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.832921] env[62585]: DEBUG nova.policy [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02348f13253c4a43bc09e7d255bb2b23', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbafceca6afd477e8afa38df5790b585', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 896.971477] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.028745] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "refresh_cache-2cf85b78-df04-40d0-a7db-5e8979574d0a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.064460] env[62585]: INFO nova.compute.manager [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Shelve offloading [ 897.066281] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 897.066545] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d62aea7f-dfdb-4a50-afcf-642b9d5a43b3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.077122] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 897.077122] env[62585]: value = "task-1384865" [ 897.077122] env[62585]: _type = "Task" [ 897.077122] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.087222] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384865, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.087366] env[62585]: DEBUG nova.network.neutron [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Updated VIF entry in instance network info cache for port 144b0acd-f385-4e1c-b8cc-8396bf7f7648. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 897.087755] env[62585]: DEBUG nova.network.neutron [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Updating instance_info_cache with network_info: [{"id": "144b0acd-f385-4e1c-b8cc-8396bf7f7648", "address": "fa:16:3e:57:e9:e4", "network": {"id": "7ae3ebe7-9a7f-4139-a4c5-a66a0b7f0d27", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1276079323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbafceca6afd477e8afa38df5790b585", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap144b0acd-f3", "ovs_interfaceid": "144b0acd-f385-4e1c-b8cc-8396bf7f7648", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.101823] env[62585]: DEBUG nova.network.neutron [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Successfully created port: 4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 897.105776] env[62585]: DEBUG oslo_concurrency.lockutils [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "interface-a634a80e-d90a-4ce3-8233-75657a7754be-4b5af1c5-20c1-446c-aad5-023ac683f7e8" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.105911] env[62585]: DEBUG oslo_concurrency.lockutils [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "interface-a634a80e-d90a-4ce3-8233-75657a7754be-4b5af1c5-20c1-446c-aad5-023ac683f7e8" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.106169] env[62585]: DEBUG nova.objects.instance [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f 
tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lazy-loading 'flavor' on Instance uuid a634a80e-d90a-4ce3-8233-75657a7754be {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 897.144351] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384864, 'name': CloneVM_Task} progress is 94%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.180475] env[62585]: DEBUG nova.network.neutron [req-40fe21a8-62e4-499f-a0b9-6ac64711a8e1 req-c6bd1f39-4571-41e6-930c-8a658ed2f6a6 service nova] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 897.186372] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 897.186682] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc95d496-646b-434f-a7b0-a74903c0bea6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.196480] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 897.196480] env[62585]: value = "task-1384866" [ 897.196480] env[62585]: _type = "Task" [ 897.196480] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.205360] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384866, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.257133] env[62585]: DEBUG nova.network.neutron [req-40fe21a8-62e4-499f-a0b9-6ac64711a8e1 req-c6bd1f39-4571-41e6-930c-8a658ed2f6a6 service nova] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.282365] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Applying migration context for instance d96a04d7-b07f-439d-aafa-09dc70a4d1a7 as it has an incoming, in-progress migration fd35d3e7-abab-46e9-98a6-bb0a3cbf68a6. 
Migration status is post-migrating {{(pid=62585) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 897.284058] env[62585]: INFO nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating resource usage from migration fd35d3e7-abab-46e9-98a6-bb0a3cbf68a6 [ 897.288197] env[62585]: DEBUG nova.compute.manager [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 897.314722] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a432bb-291f-77b9-03b1-7731b62013c8, 'name': SearchDatastore_Task, 'duration_secs': 0.011482} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.315785] env[62585]: WARNING nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 8763a058-b453-4f03-9532-7d7e65efdfb2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 897.315971] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance abf4a205-fcee-46e4-85b6-10a452cc0312 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 897.316360] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance a634a80e-d90a-4ce3-8233-75657a7754be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 897.316360] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 70ac6289-2f14-4fb0-a811-97d76cafc532 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 897.316503] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance f1bfef38-b6d0-40d0-8e60-310f8a75dd78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 897.316503] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance ddb1103d-a846-4229-b441-de45424b4ec9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 897.316643] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 891e5a42-3681-47eb-ac88-015fa21a6580 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 897.316733] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Migration fd35d3e7-abab-46e9-98a6-bb0a3cbf68a6 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 897.316829] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance d96a04d7-b07f-439d-aafa-09dc70a4d1a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 897.316948] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 4b080cc3-e1cc-4b64-9926-c37b891444f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 897.317073] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 2cf85b78-df04-40d0-a7db-5e8979574d0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 897.317181] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance fcbbc06c-71fa-4891-8bfc-0de746b9e622 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 897.318644] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.318894] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 897.319164] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.319319] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.319502] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 897.320083] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3bb9d77f-3892-4d0a-9790-7d427f5556aa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.341949] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 897.342224] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 897.342947] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e4e5c27-1bfe-4cb3-9443-a42804c99a97 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.349035] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 897.349035] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52741cc4-1b5b-8e0c-61d6-261e86ac8efc" [ 897.349035] env[62585]: _type = "Task" [ 897.349035] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.357658] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52741cc4-1b5b-8e0c-61d6-261e86ac8efc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.590707] env[62585]: DEBUG oslo_concurrency.lockutils [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] Releasing lock "refresh_cache-4b080cc3-e1cc-4b64-9926-c37b891444f5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.591010] env[62585]: DEBUG nova.compute.manager [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Received event network-vif-unplugged-bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 897.591232] env[62585]: DEBUG oslo_concurrency.lockutils [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] Acquiring lock "6057e13b-71df-458d-b6ed-c139a8c57836-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.591455] env[62585]: DEBUG oslo_concurrency.lockutils [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] Lock "6057e13b-71df-458d-b6ed-c139a8c57836-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.591640] env[62585]: DEBUG oslo_concurrency.lockutils [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] Lock "6057e13b-71df-458d-b6ed-c139a8c57836-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.591893] env[62585]: DEBUG nova.compute.manager [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] No waiting events found dispatching network-vif-unplugged-bc3d19ab-ba98-4935-9e08-61c5df21be43 
{{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 897.591999] env[62585]: WARNING nova.compute.manager [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Received unexpected event network-vif-unplugged-bc3d19ab-ba98-4935-9e08-61c5df21be43 for instance with vm_state shelved and task_state shelving_offloading. [ 897.592204] env[62585]: DEBUG nova.compute.manager [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Received event network-changed-bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 897.592364] env[62585]: DEBUG nova.compute.manager [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Refreshing instance network info cache due to event network-changed-bc3d19ab-ba98-4935-9e08-61c5df21be43. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 897.592561] env[62585]: DEBUG oslo_concurrency.lockutils [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] Acquiring lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.592705] env[62585]: DEBUG oslo_concurrency.lockutils [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] Acquired lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.592878] env[62585]: DEBUG nova.network.neutron [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Refreshing network info cache for port bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 897.594246] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] VM already powered off {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 897.594431] env[62585]: DEBUG nova.compute.manager [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 897.595447] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a600e18c-4099-4ced-9965-4e2055590352 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.603256] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "refresh_cache-f1bfef38-b6d0-40d0-8e60-310f8a75dd78" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 
897.603452] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "refresh_cache-f1bfef38-b6d0-40d0-8e60-310f8a75dd78" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.603743] env[62585]: DEBUG nova.network.neutron [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.643299] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384864, 'name': CloneVM_Task} progress is 94%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.707230] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384866, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.760156] env[62585]: DEBUG oslo_concurrency.lockutils [req-40fe21a8-62e4-499f-a0b9-6ac64711a8e1 req-c6bd1f39-4571-41e6-930c-8a658ed2f6a6 service nova] Releasing lock "refresh_cache-2cf85b78-df04-40d0-a7db-5e8979574d0a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.760632] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquired lock "refresh_cache-2cf85b78-df04-40d0-a7db-5e8979574d0a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.760810] env[62585]: DEBUG nova.network.neutron [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.768593] env[62585]: DEBUG nova.objects.instance [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lazy-loading 'pci_requests' on Instance uuid a634a80e-d90a-4ce3-8233-75657a7754be {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 897.821653] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 01941b61-1960-4360-9dd0-513d5597bc70 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 897.821653] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 897.821653] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2944MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 897.860457] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52741cc4-1b5b-8e0c-61d6-261e86ac8efc, 'name': SearchDatastore_Task, 'duration_secs': 0.025764} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.864762] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a1b5fde-bbb9-4e2c-b9ba-c5c115a5ab8d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.870926] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 897.870926] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52796c44-82ce-60b4-3640-739e38771bdf" [ 897.870926] env[62585]: _type = "Task" [ 897.870926] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.880145] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52796c44-82ce-60b4-3640-739e38771bdf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.011561] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc28ea38-216d-4ea5-b374-ec0a42f0739d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.020319] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f26940-1c51-4560-9aec-e83db4e0e946 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.051113] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c8a8ea-81c9-4a53-8665-007735393afc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.059869] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74a8d37-55c0-406e-bf25-ec81fdf5ff06 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.075220] env[62585]: DEBUG nova.compute.provider_tree [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.143751] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384864, 'name': CloneVM_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.208072] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384866, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.271891] env[62585]: DEBUG nova.objects.base [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62585) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 898.271891] env[62585]: DEBUG nova.network.neutron [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 898.300301] env[62585]: DEBUG nova.compute.manager [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 898.315309] env[62585]: DEBUG nova.network.neutron [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.327896] env[62585]: DEBUG nova.virt.hardware [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 898.329418] env[62585]: DEBUG nova.virt.hardware [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 898.329418] env[62585]: DEBUG nova.virt.hardware [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 898.329418] env[62585]: DEBUG nova.virt.hardware [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 898.329418] env[62585]: DEBUG nova.virt.hardware [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 898.329418] env[62585]: DEBUG nova.virt.hardware [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 898.329418] env[62585]: DEBUG nova.virt.hardware [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 898.329418] env[62585]: DEBUG nova.virt.hardware [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 898.329418] env[62585]: DEBUG nova.virt.hardware [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 898.329775] env[62585]: DEBUG nova.virt.hardware [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 898.329918] env[62585]: DEBUG nova.virt.hardware [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 898.330847] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f21fc5-6c80-4c7c-a0b0-e967bcdedb05 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.341772] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2588b3c6-0643-47d9-a77c-99635963addb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.381670] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52796c44-82ce-60b4-3640-739e38771bdf, 'name': SearchDatastore_Task, 'duration_secs': 0.030208} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.381999] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.382300] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 4b080cc3-e1cc-4b64-9926-c37b891444f5/4b080cc3-e1cc-4b64-9926-c37b891444f5.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 898.382570] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc3553a3-0485-4c30-a9d8-f66c20ca4ba6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.391111] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 898.391111] env[62585]: value = "task-1384867" [ 898.391111] env[62585]: _type = "Task" [ 898.391111] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.400362] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384867, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.475191] env[62585]: DEBUG nova.policy [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f26abf4eaa71482b8fd3c6425a9c683d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48929b5f0c2c41ddade223ab57002fc4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 898.527687] env[62585]: DEBUG nova.network.neutron [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Updating instance_info_cache with network_info: [{"id": "57d68249-852c-488a-ac40-8de77dec7712", "address": "fa:16:3e:d1:d6:2f", "network": {"id": "7ae3ebe7-9a7f-4139-a4c5-a66a0b7f0d27", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1276079323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbafceca6afd477e8afa38df5790b585", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57d68249-85", "ovs_interfaceid": "57d68249-852c-488a-ac40-8de77dec7712", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.578269] env[62585]: DEBUG nova.scheduler.client.report [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 898.587835] env[62585]: DEBUG nova.network.neutron [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Updating instance_info_cache with network_info: [{"id": "969d2b65-14d8-4ce4-b801-2bdc9e536e20", "address": "fa:16:3e:df:d6:69", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", 
"label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap969d2b65-14", "ovs_interfaceid": "969d2b65-14d8-4ce4-b801-2bdc9e536e20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.634654] env[62585]: DEBUG nova.network.neutron [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updated VIF entry in instance network info cache for port bc3d19ab-ba98-4935-9e08-61c5df21be43. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 898.635028] env[62585]: DEBUG nova.network.neutron [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updating instance_info_cache with network_info: [{"id": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "address": "fa:16:3e:31:3b:9c", "network": {"id": "8a8daef6-7b2d-44f6-8f2a-5cdf4dfff449", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-240024676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9645866ca8f0433cae30cf5867244ca8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapbc3d19ab-ba", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.646616] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384864, 'name': CloneVM_Task, 'duration_secs': 1.518576} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.646920] env[62585]: INFO nova.virt.vmwareapi.vmops [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Created linked-clone VM from snapshot [ 898.647756] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63bdf87a-3be0-4b85-b44a-cccd22bcbebf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.657222] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Uploading image 310691a0-fca0-4934-a7c2-2e7b96be6e6c {{(pid=62585) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 898.687642] env[62585]: DEBUG nova.network.neutron [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Successfully updated port: 4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 898.700248] env[62585]: DEBUG oslo_vmware.rw_handles [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 898.700248] env[62585]: value = "vm-294036" [ 898.700248] env[62585]: _type = "VirtualMachine" [ 898.700248] env[62585]: }. {{(pid=62585) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 898.700767] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4892adf6-ed23-46cb-835a-17a9b3e0f1e0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.713182] env[62585]: DEBUG oslo_vmware.api [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384866, 'name': PowerOnVM_Task, 'duration_secs': 1.177173} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.714513] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 898.714719] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d0226403-c90a-4bda-ac2a-a633e43ee78a tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating instance 'd96a04d7-b07f-439d-aafa-09dc70a4d1a7' progress to 100 {{(pid=62585) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 898.718220] env[62585]: DEBUG oslo_vmware.rw_handles [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lease: (returnval){ [ 898.718220] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e9d521-91db-795b-46c9-6dd7cab618e1" [ 898.718220] env[62585]: _type = "HttpNfcLease" [ 898.718220] env[62585]: } obtained for exporting VM: (result){ [ 898.718220] env[62585]: value = "vm-294036" [ 898.718220] env[62585]: _type = "VirtualMachine" [ 898.718220] env[62585]: }. {{(pid=62585) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 898.718926] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the lease: (returnval){ [ 898.718926] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e9d521-91db-795b-46c9-6dd7cab618e1" [ 898.718926] env[62585]: _type = "HttpNfcLease" [ 898.718926] env[62585]: } to be ready. {{(pid=62585) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 898.726011] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 898.726011] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e9d521-91db-795b-46c9-6dd7cab618e1" [ 898.726011] env[62585]: _type = "HttpNfcLease" [ 898.726011] env[62585]: } is initializing. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 898.903248] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384867, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.031248] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Releasing lock "refresh_cache-2cf85b78-df04-40d0-a7db-5e8979574d0a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.031542] env[62585]: DEBUG nova.compute.manager [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Instance network_info: |[{"id": "57d68249-852c-488a-ac40-8de77dec7712", "address": "fa:16:3e:d1:d6:2f", "network": {"id": "7ae3ebe7-9a7f-4139-a4c5-a66a0b7f0d27", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1276079323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbafceca6afd477e8afa38df5790b585", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57d68249-85", "ovs_interfaceid": "57d68249-852c-488a-ac40-8de77dec7712", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 899.032064] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:d6:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57d68249-852c-488a-ac40-8de77dec7712', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 899.040087] env[62585]: DEBUG oslo.service.loopingcall [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 899.040313] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 899.040557] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59a99d9a-fa80-4d4e-ba28-33dddb31c186 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.063211] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 899.063211] env[62585]: value = "task-1384869" [ 899.063211] env[62585]: _type = "Task" [ 899.063211] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.074464] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384869, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.082633] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62585) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 899.082911] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.813s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.083216] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.934s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.085065] env[62585]: INFO nova.compute.claims [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 899.090487] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "refresh_cache-f1bfef38-b6d0-40d0-8e60-310f8a75dd78" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.140837] env[62585]: DEBUG oslo_concurrency.lockutils [req-0aa1196e-4113-477c-9811-23b090fe49ef req-38ccf1c8-3f51-428f-a90a-5e45dfa1fa3a service nova] Releasing lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.192142] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "refresh_cache-fcbbc06c-71fa-4891-8bfc-0de746b9e622" {{(pid=62585) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.192381] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquired lock "refresh_cache-fcbbc06c-71fa-4891-8bfc-0de746b9e622" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.192468] env[62585]: DEBUG nova.network.neutron [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 899.234612] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 899.234612] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e9d521-91db-795b-46c9-6dd7cab618e1" [ 899.234612] env[62585]: _type = "HttpNfcLease" [ 899.234612] env[62585]: } is ready. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 899.234994] env[62585]: DEBUG oslo_vmware.rw_handles [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 899.234994] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e9d521-91db-795b-46c9-6dd7cab618e1" [ 899.234994] env[62585]: _type = "HttpNfcLease" [ 899.234994] env[62585]: }. {{(pid=62585) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 899.235889] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa125c30-cd30-4b3c-9e84-c5f13d2114c2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.251214] env[62585]: DEBUG oslo_vmware.rw_handles [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52aad523-4213-ff83-4871-103d8afe2011/disk-0.vmdk from lease info. {{(pid=62585) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 899.251457] env[62585]: DEBUG oslo_vmware.rw_handles [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52aad523-4213-ff83-4871-103d8afe2011/disk-0.vmdk for reading. {{(pid=62585) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 899.359180] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a324bc2c-1b7e-414f-a872-6a46ae5184bc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.405114] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384867, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.900298} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.405408] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 4b080cc3-e1cc-4b64-9926-c37b891444f5/4b080cc3-e1cc-4b64-9926-c37b891444f5.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 899.405632] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 899.405892] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f194ce4e-027e-4530-abb5-62ff85164b18 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.413934] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 899.413934] env[62585]: value = "task-1384870" [ 899.413934] env[62585]: _type = "Task" [ 899.413934] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.428166] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384870, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.577609] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384869, 'name': CreateVM_Task, 'duration_secs': 0.498621} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.577786] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 899.578612] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.578806] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.579171] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 899.579442] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-955cb94e-f687-40df-8975-a3454f937515 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.586700] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 899.586700] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527a200d-3bb1-7858-896d-c5bb76c0cf73" [ 899.586700] env[62585]: _type = "Task" [ 899.586700] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.600958] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527a200d-3bb1-7858-896d-c5bb76c0cf73, 'name': SearchDatastore_Task, 'duration_secs': 0.010136} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.600958] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.600958] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.600958] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.600958] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.600958] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.600958] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa57e8e3-e830-4471-9890-6418a450ae20 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.610156] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.610386] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 899.611253] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b71bba39-a8e2-4df9-8e17-ca293793c4b7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.620781] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 899.620781] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527abf54-aa47-a7d9-2e77-2e3d2ae42fbc" [ 899.620781] env[62585]: _type = "Task" [ 899.620781] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.629654] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527abf54-aa47-a7d9-2e77-2e3d2ae42fbc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.631276] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 899.632298] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fac5d4-919e-4aaa-bb05-c478c5620e24 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.638369] env[62585]: DEBUG nova.compute.manager [req-3a81b068-fd8b-464d-8e5c-ada288be5dbf req-54f64074-640b-4c2a-bdd2-6b9269e7ebdc service nova] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Received event network-vif-plugged-4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 899.638509] env[62585]: DEBUG oslo_concurrency.lockutils [req-3a81b068-fd8b-464d-8e5c-ada288be5dbf req-54f64074-640b-4c2a-bdd2-6b9269e7ebdc service nova] Acquiring lock "fcbbc06c-71fa-4891-8bfc-0de746b9e622-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.638665] env[62585]: DEBUG oslo_concurrency.lockutils [req-3a81b068-fd8b-464d-8e5c-ada288be5dbf req-54f64074-640b-4c2a-bdd2-6b9269e7ebdc service nova] Lock "fcbbc06c-71fa-4891-8bfc-0de746b9e622-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.638828] env[62585]: DEBUG oslo_concurrency.lockutils [req-3a81b068-fd8b-464d-8e5c-ada288be5dbf req-54f64074-640b-4c2a-bdd2-6b9269e7ebdc service nova] Lock "fcbbc06c-71fa-4891-8bfc-0de746b9e622-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.638995] 
env[62585]: DEBUG nova.compute.manager [req-3a81b068-fd8b-464d-8e5c-ada288be5dbf req-54f64074-640b-4c2a-bdd2-6b9269e7ebdc service nova] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] No waiting events found dispatching network-vif-plugged-4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 899.639184] env[62585]: WARNING nova.compute.manager [req-3a81b068-fd8b-464d-8e5c-ada288be5dbf req-54f64074-640b-4c2a-bdd2-6b9269e7ebdc service nova] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Received unexpected event network-vif-plugged-4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5 for instance with vm_state building and task_state spawning. [ 899.641949] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 899.642213] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02dbb97c-0495-4061-a5ea-1e27f5318368 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.711577] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 899.711996] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 899.712228] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Deleting the datastore file [datastore1] f1bfef38-b6d0-40d0-8e60-310f8a75dd78 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 899.712598] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8efe619-d18c-427a-afd0-fb8ed89c96e1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.719888] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 899.719888] env[62585]: value = "task-1384872" [ 899.719888] env[62585]: _type = "Task" [ 899.719888] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.733652] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384872, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.734594] env[62585]: DEBUG nova.network.neutron [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 899.880234] env[62585]: DEBUG nova.network.neutron [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Updating instance_info_cache with network_info: [{"id": "4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5", "address": "fa:16:3e:3b:b0:24", "network": {"id": "7ae3ebe7-9a7f-4139-a4c5-a66a0b7f0d27", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1276079323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbafceca6afd477e8afa38df5790b585", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e6e9eb6-db", "ovs_interfaceid": "4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.926228] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384870, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094057} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.926500] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 899.927423] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16d93ce-4cb4-4bcb-8298-0ad39ead7c15 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.952680] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 4b080cc3-e1cc-4b64-9926-c37b891444f5/4b080cc3-e1cc-4b64-9926-c37b891444f5.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 899.953130] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b89a5bc-6932-4e21-a10f-0a1665bb00f6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.976615] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 899.976615] env[62585]: value = "task-1384873" [ 899.976615] env[62585]: _type = "Task" [ 899.976615] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.988513] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384873, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.140567] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527abf54-aa47-a7d9-2e77-2e3d2ae42fbc, 'name': SearchDatastore_Task, 'duration_secs': 0.009124} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.144021] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eff77174-5e2d-4349-869f-bb5055b66ee2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.154062] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 900.154062] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]528623b4-d6fb-f559-b192-c0aa5a88c56b" [ 900.154062] env[62585]: _type = "Task" [ 900.154062] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.169184] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]528623b4-d6fb-f559-b192-c0aa5a88c56b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.242674] env[62585]: DEBUG oslo_vmware.api [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384872, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.379315} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.243951] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 900.247597] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 900.247597] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 900.259596] env[62585]: DEBUG nova.network.neutron [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Successfully updated port: 4b5af1c5-20c1-446c-aad5-023ac683f7e8 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 900.297643] env[62585]: INFO nova.scheduler.client.report [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Deleted allocations for instance f1bfef38-b6d0-40d0-8e60-310f8a75dd78 [ 900.335543] env[62585]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628ddec3-736b-4647-9199-e1ffe00f2481 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.345588] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c699648a-d51c-4325-83f8-58dc9da36e8d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.381652] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed722a9b-8353-456b-9628-ca7557760fed {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.385540] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Releasing lock "refresh_cache-fcbbc06c-71fa-4891-8bfc-0de746b9e622" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.385633] env[62585]: DEBUG nova.compute.manager [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Instance network_info: |[{"id": "4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5", "address": "fa:16:3e:3b:b0:24", "network": {"id": "7ae3ebe7-9a7f-4139-a4c5-a66a0b7f0d27", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1276079323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbafceca6afd477e8afa38df5790b585", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e6e9eb6-db", "ovs_interfaceid": "4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 900.386114] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:b0:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 900.396862] env[62585]: DEBUG oslo.service.loopingcall [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 
tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 900.398296] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 900.398722] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72b7a752-aced-4827-b195-b01ee0dd771c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.419505] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd695b54-fe46-4ed0-a2f3-8ca23b74c7d0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.426387] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 900.426387] env[62585]: value = "task-1384874" [ 900.426387] env[62585]: _type = "Task" [ 900.426387] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.440858] env[62585]: DEBUG nova.compute.provider_tree [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.448981] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384874, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.451847] env[62585]: DEBUG nova.compute.manager [req-87271341-92dd-44a7-9031-cf57875c7c92 req-8769d77a-5d99-4ea6-8437-55bb6e98b298 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Received event network-vif-plugged-4b5af1c5-20c1-446c-aad5-023ac683f7e8 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 900.452169] env[62585]: DEBUG oslo_concurrency.lockutils [req-87271341-92dd-44a7-9031-cf57875c7c92 req-8769d77a-5d99-4ea6-8437-55bb6e98b298 service nova] Acquiring lock "a634a80e-d90a-4ce3-8233-75657a7754be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.452491] env[62585]: DEBUG oslo_concurrency.lockutils [req-87271341-92dd-44a7-9031-cf57875c7c92 req-8769d77a-5d99-4ea6-8437-55bb6e98b298 service nova] Lock "a634a80e-d90a-4ce3-8233-75657a7754be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.452849] env[62585]: DEBUG oslo_concurrency.lockutils [req-87271341-92dd-44a7-9031-cf57875c7c92 req-8769d77a-5d99-4ea6-8437-55bb6e98b298 service nova] Lock "a634a80e-d90a-4ce3-8233-75657a7754be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.453110] env[62585]: DEBUG nova.compute.manager [req-87271341-92dd-44a7-9031-cf57875c7c92 req-8769d77a-5d99-4ea6-8437-55bb6e98b298 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] No waiting events found dispatching network-vif-plugged-4b5af1c5-20c1-446c-aad5-023ac683f7e8 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 900.453545] env[62585]: WARNING nova.compute.manager [req-87271341-92dd-44a7-9031-cf57875c7c92 req-8769d77a-5d99-4ea6-8437-55bb6e98b298 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Received unexpected event network-vif-plugged-4b5af1c5-20c1-446c-aad5-023ac683f7e8 for instance with vm_state active and task_state None. [ 900.489301] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384873, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.642813] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.643668] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.643668] env[62585]: DEBUG nova.compute.manager [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Going to confirm migration 1 {{(pid=62585) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 900.665662] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]528623b4-d6fb-f559-b192-c0aa5a88c56b, 'name': SearchDatastore_Task, 'duration_secs': 0.030449} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.665982] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.666305] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 2cf85b78-df04-40d0-a7db-5e8979574d0a/2cf85b78-df04-40d0-a7db-5e8979574d0a.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 900.666670] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2511dda9-9854-41a6-a7b0-7c68896dd311 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.676091] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 900.676091] env[62585]: value = "task-1384875" [ 900.676091] env[62585]: _type = "Task" [ 900.676091] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.690465] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384875, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.763105] env[62585]: DEBUG oslo_concurrency.lockutils [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.763371] env[62585]: DEBUG oslo_concurrency.lockutils [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.763574] env[62585]: DEBUG nova.network.neutron [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 900.805196] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.941493] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384874, 'name': CreateVM_Task, 'duration_secs': 0.454642} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.941829] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 900.942915] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.943260] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.943766] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 900.945036] env[62585]: DEBUG nova.scheduler.client.report [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 900.950471] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2d48383-72fa-4d09-a0a6-9099f9310c41 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.958405] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 900.958405] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52372e29-0460-ec56-450b-20dcd1090a9d" [ 900.958405] env[62585]: _type = "Task" [ 900.958405] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.969620] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52372e29-0460-ec56-450b-20dcd1090a9d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.989291] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384873, 'name': ReconfigVM_Task, 'duration_secs': 0.77338} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.989648] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 4b080cc3-e1cc-4b64-9926-c37b891444f5/4b080cc3-e1cc-4b64-9926-c37b891444f5.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 900.990398] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f06c18eb-6e1e-4954-b360-92089785a0b1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.000339] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 901.000339] env[62585]: value = "task-1384876" [ 901.000339] env[62585]: _type = "Task" [ 901.000339] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.010719] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384876, 'name': Rename_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.188956] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384875, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.191932] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "6057e13b-71df-458d-b6ed-c139a8c57836" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.217455] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "refresh_cache-d96a04d7-b07f-439d-aafa-09dc70a4d1a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.218025] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock "refresh_cache-d96a04d7-b07f-439d-aafa-09dc70a4d1a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.218288] env[62585]: DEBUG nova.network.neutron [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.218646] env[62585]: DEBUG nova.objects.instance [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lazy-loading 'info_cache' on Instance uuid d96a04d7-b07f-439d-aafa-09dc70a4d1a7 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.321949] env[62585]: WARNING nova.network.neutron [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] 19cc8f17-f362-4186-901c-3dc61c1ef3e5 already exists in list: networks containing: ['19cc8f17-f362-4186-901c-3dc61c1ef3e5']. ignoring it [ 901.454524] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.371s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.455018] env[62585]: DEBUG nova.compute.manager [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 901.457824] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.515s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.458072] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.460079] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.489s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.460309] env[62585]: DEBUG nova.objects.instance [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lazy-loading 'resources' on Instance uuid 6057e13b-71df-458d-b6ed-c139a8c57836 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.489740] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52372e29-0460-ec56-450b-20dcd1090a9d, 'name': SearchDatastore_Task, 'duration_secs': 0.012289} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.490226] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.490525] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 901.490827] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.491019] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.491304] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 901.491619] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d822c305-52ec-45e1-a58c-79a95ff69c4f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.496022] env[62585]: INFO nova.scheduler.client.report [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Deleted allocations for instance 8763a058-b453-4f03-9532-7d7e65efdfb2 [ 901.511899] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 901.512143] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 901.517203] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9540f621-b3d8-4b9d-9b16-97e2f9aecec0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.519956] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384876, 'name': Rename_Task} progress is 99%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.525072] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 901.525072] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f53de5-c58f-9f84-b4f2-a11a0a47ed0a" [ 901.525072] env[62585]: _type = "Task" [ 901.525072] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.536207] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f53de5-c58f-9f84-b4f2-a11a0a47ed0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.690268] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384875, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.684944} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.690544] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 2cf85b78-df04-40d0-a7db-5e8979574d0a/2cf85b78-df04-40d0-a7db-5e8979574d0a.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 901.690770] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 901.691060] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df996dbc-1008-4b5c-a985-69718395a805 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.700025] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 901.700025] env[62585]: value = "task-1384877" [ 901.700025] env[62585]: _type = "Task" [ 901.700025] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.710778] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384877, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.724472] env[62585]: DEBUG oslo_concurrency.lockutils [None req-27c10dbe-7c65-4830-b4fe-ab719aabf4f2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.926832] env[62585]: DEBUG nova.network.neutron [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updating instance_info_cache with network_info: [{"id": "840822b3-e947-451f-90bf-03eafebebf95", "address": "fa:16:3e:f8:2a:80", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap840822b3-e9", "ovs_interfaceid": "840822b3-e947-451f-90bf-03eafebebf95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4b5af1c5-20c1-446c-aad5-023ac683f7e8", "address": "fa:16:3e:21:aa:89", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b5af1c5-20", "ovs_interfaceid": "4b5af1c5-20c1-446c-aad5-023ac683f7e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.967767] env[62585]: DEBUG nova.compute.utils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 
tempest-ServersTestJSON-1776640796-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 901.970035] env[62585]: DEBUG nova.objects.instance [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lazy-loading 'numa_topology' on Instance uuid 6057e13b-71df-458d-b6ed-c139a8c57836 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.970640] env[62585]: DEBUG nova.compute.manager [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 901.970865] env[62585]: DEBUG nova.network.neutron [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 902.016489] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384876, 'name': Rename_Task, 'duration_secs': 0.566441} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.016981] env[62585]: DEBUG oslo_concurrency.lockutils [None req-3b4fed37-e563-40e0-bede-c846cf212e4a tempest-SecurityGroupsTestJSON-11186561 tempest-SecurityGroupsTestJSON-11186561-project-member] Lock "8763a058-b453-4f03-9532-7d7e65efdfb2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.064s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.018079] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 902.018335] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da08650f-0569-42c8-8314-241c929f475d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.026309] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 902.026309] env[62585]: value = "task-1384878" [ 902.026309] env[62585]: _type = "Task" [ 902.026309] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.030959] env[62585]: DEBUG nova.policy [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac7d82c678d64fba8373930238d5bb2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8121e0a00494834a580b940d36e0160', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 902.046083] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384878, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.046453] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f53de5-c58f-9f84-b4f2-a11a0a47ed0a, 'name': SearchDatastore_Task, 'duration_secs': 0.013017} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.047345] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f35d393-32a0-41fa-8287-57a85de735d9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.055078] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 902.055078] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e6747e-f493-fba7-0953-649322968301" [ 902.055078] env[62585]: _type = "Task" [ 902.055078] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.065281] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e6747e-f493-fba7-0953-649322968301, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.211547] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384877, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.164605} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.211817] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 902.212674] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9732daff-7f91-4c8d-9e68-ece837d41f9f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.238367] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 2cf85b78-df04-40d0-a7db-5e8979574d0a/2cf85b78-df04-40d0-a7db-5e8979574d0a.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 902.239935] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abd6f849-a4e6-49dd-829c-d530deea52ae {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.256369] env[62585]: DEBUG nova.compute.manager [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Received event network-changed-4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 902.256583] env[62585]: DEBUG nova.compute.manager [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Refreshing instance network info cache due to event network-changed-4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 902.256826] env[62585]: DEBUG oslo_concurrency.lockutils [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] Acquiring lock "refresh_cache-fcbbc06c-71fa-4891-8bfc-0de746b9e622" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.256992] env[62585]: DEBUG oslo_concurrency.lockutils [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] Acquired lock "refresh_cache-fcbbc06c-71fa-4891-8bfc-0de746b9e622" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.257182] env[62585]: DEBUG nova.network.neutron [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Refreshing network info cache for port 4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 902.266967] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 902.266967] env[62585]: value = "task-1384879" [ 902.266967] env[62585]: _type = "Task" [ 902.266967] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.279108] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384879, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.395827] env[62585]: DEBUG nova.network.neutron [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Successfully created port: 795b88c3-09ab-44aa-bb6f-8bd339ffc0de {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 902.429824] env[62585]: DEBUG oslo_concurrency.lockutils [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.430586] env[62585]: DEBUG oslo_concurrency.lockutils [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.430755] env[62585]: DEBUG oslo_concurrency.lockutils [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.431630] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2127084-c8de-43b4-953f-d0f8d9e4e6c9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.453954] env[62585]: DEBUG nova.virt.hardware [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 902.454356] env[62585]: DEBUG nova.virt.hardware [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 902.454605] env[62585]: DEBUG nova.virt.hardware [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 902.454859] env[62585]: DEBUG nova.virt.hardware [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 
tempest-AttachInterfacesTestJSON-1857705027-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 902.455106] env[62585]: DEBUG nova.virt.hardware [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 902.455315] env[62585]: DEBUG nova.virt.hardware [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 902.455726] env[62585]: DEBUG nova.virt.hardware [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 902.456215] env[62585]: DEBUG nova.virt.hardware [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 902.456469] env[62585]: DEBUG nova.virt.hardware [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 902.456910] env[62585]: DEBUG nova.virt.hardware [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 902.457355] env[62585]: DEBUG nova.virt.hardware [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 902.464313] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Reconfiguring VM to attach interface {{(pid=62585) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 902.467176] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24f0b0bb-729e-4191-aeb5-279babf51875 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.479829] env[62585]: DEBUG nova.compute.manager [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Start building block device mappings for 
instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 902.482874] env[62585]: DEBUG nova.objects.base [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Object Instance<6057e13b-71df-458d-b6ed-c139a8c57836> lazy-loaded attributes: resources,numa_topology {{(pid=62585) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 902.499088] env[62585]: DEBUG oslo_vmware.api [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 902.499088] env[62585]: value = "task-1384880" [ 902.499088] env[62585]: _type = "Task" [ 902.499088] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.510015] env[62585]: DEBUG oslo_vmware.api [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384880, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.538926] env[62585]: DEBUG nova.network.neutron [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating instance_info_cache with network_info: [{"id": "c92c2d1e-3117-42a1-a5d2-3de9eba6e107", "address": "fa:16:3e:35:20:3f", "network": {"id": "8c3bc3f6-1bf0-436b-b7d4-cf0757610bb8", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972774874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19b8936eaf754cbcbd1b099846a3146d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc92c2d1e-31", "ovs_interfaceid": "c92c2d1e-3117-42a1-a5d2-3de9eba6e107", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.552040] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384878, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.570023] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e6747e-f493-fba7-0953-649322968301, 'name': SearchDatastore_Task, 'duration_secs': 0.016393} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.570335] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.570606] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] fcbbc06c-71fa-4891-8bfc-0de746b9e622/fcbbc06c-71fa-4891-8bfc-0de746b9e622.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 902.571615] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a0f69fea-5aa7-4784-bc79-1c1aa2e5654d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.583492] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 902.583492] env[62585]: value = "task-1384881" [ 902.583492] env[62585]: _type = "Task" [ 902.583492] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.597082] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384881, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.708962] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd770183-c90b-4b9a-944b-8ee681db64a1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.721309] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97caecd-da3f-4d3e-8913-8d623030fc00 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.755431] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e83cbc-e8ef-45ee-a546-63dbaf4488c7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.765380] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e344fba-dad6-4cd9-9862-061709835052 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.783760] env[62585]: DEBUG nova.compute.provider_tree [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.788235] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384879, 'name': ReconfigVM_Task, 'duration_secs': 0.475938} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.788789] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 2cf85b78-df04-40d0-a7db-5e8979574d0a/2cf85b78-df04-40d0-a7db-5e8979574d0a.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 902.789508] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9af60a7c-e51a-4355-88a1-b907f16fad96 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.797962] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 902.797962] env[62585]: value = "task-1384882" [ 902.797962] env[62585]: _type = "Task" [ 902.797962] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.807260] env[62585]: DEBUG oslo_concurrency.lockutils [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "70ac6289-2f14-4fb0-a811-97d76cafc532" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.807585] env[62585]: DEBUG oslo_concurrency.lockutils [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "70ac6289-2f14-4fb0-a811-97d76cafc532" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.812528] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384882, 'name': Rename_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.001529] env[62585]: DEBUG nova.network.neutron [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Updated VIF entry in instance network info cache for port 4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 903.001925] env[62585]: DEBUG nova.network.neutron [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Updating instance_info_cache with network_info: [{"id": "4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5", "address": "fa:16:3e:3b:b0:24", "network": {"id": "7ae3ebe7-9a7f-4139-a4c5-a66a0b7f0d27", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1276079323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbafceca6afd477e8afa38df5790b585", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e6e9eb6-db", "ovs_interfaceid": "4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.013640] env[62585]: DEBUG oslo_vmware.api [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384880, 'name': ReconfigVM_Task} progress is 
14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.043484] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "refresh_cache-d96a04d7-b07f-439d-aafa-09dc70a4d1a7" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.043821] env[62585]: DEBUG nova.objects.instance [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lazy-loading 'migration_context' on Instance uuid d96a04d7-b07f-439d-aafa-09dc70a4d1a7 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 903.045062] env[62585]: DEBUG oslo_vmware.api [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384878, 'name': PowerOnVM_Task, 'duration_secs': 0.669781} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.045648] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 903.045866] env[62585]: INFO nova.compute.manager [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Took 9.43 seconds to spawn the instance on the hypervisor. [ 903.046068] env[62585]: DEBUG nova.compute.manager [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 903.047096] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24f1832-f115-43b8-bc1b-9d2bb90cc1f3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.094543] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384881, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.291156] env[62585]: DEBUG nova.scheduler.client.report [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 903.311410] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384882, 'name': Rename_Task, 'duration_secs': 0.4924} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.311687] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.311988] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-126e0763-024a-4ba7-8993-07a488369413 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.314519] env[62585]: DEBUG nova.compute.utils [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 903.322497] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 903.322497] env[62585]: value = "task-1384883" [ 903.322497] env[62585]: _type = "Task" [ 903.322497] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.333591] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384883, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.495920] env[62585]: DEBUG nova.compute.manager [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 903.510107] env[62585]: DEBUG oslo_concurrency.lockutils [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] Releasing lock "refresh_cache-fcbbc06c-71fa-4891-8bfc-0de746b9e622" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.510313] env[62585]: DEBUG nova.compute.manager [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Received event network-vif-unplugged-969d2b65-14d8-4ce4-b801-2bdc9e536e20 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 903.510463] env[62585]: DEBUG oslo_concurrency.lockutils [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] Acquiring lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.510672] env[62585]: DEBUG oslo_concurrency.lockutils [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] Lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.510836] env[62585]: DEBUG oslo_concurrency.lockutils [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] Lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.511019] env[62585]: DEBUG nova.compute.manager [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] No waiting events found dispatching network-vif-unplugged-969d2b65-14d8-4ce4-b801-2bdc9e536e20 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 903.511203] env[62585]: DEBUG nova.compute.manager [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Received event network-vif-unplugged-969d2b65-14d8-4ce4-b801-2bdc9e536e20 for instance with task_state deleting. {{(pid=62585) _process_instance_event /opt/stack/nova/nova/compute/manager.py:10909}} [ 903.511372] env[62585]: DEBUG nova.compute.manager [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Received event network-changed-969d2b65-14d8-4ce4-b801-2bdc9e536e20 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 903.511529] env[62585]: DEBUG nova.compute.manager [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Refreshing instance network info cache due to event network-changed-969d2b65-14d8-4ce4-b801-2bdc9e536e20. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 903.511718] env[62585]: DEBUG oslo_concurrency.lockutils [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] Acquiring lock "refresh_cache-f1bfef38-b6d0-40d0-8e60-310f8a75dd78" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.511880] env[62585]: DEBUG oslo_concurrency.lockutils [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] Acquired lock "refresh_cache-f1bfef38-b6d0-40d0-8e60-310f8a75dd78" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.512072] env[62585]: DEBUG nova.network.neutron [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Refreshing network info cache for port 969d2b65-14d8-4ce4-b801-2bdc9e536e20 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 903.513241] env[62585]: DEBUG oslo_vmware.api [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384880, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.520869] env[62585]: DEBUG nova.virt.hardware [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 903.521140] env[62585]: DEBUG nova.virt.hardware [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 903.521317] env[62585]: DEBUG nova.virt.hardware [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 903.521509] env[62585]: DEBUG nova.virt.hardware [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 903.521660] env[62585]: DEBUG nova.virt.hardware [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 
tempest-ServersTestJSON-1776640796-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 903.521812] env[62585]: DEBUG nova.virt.hardware [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 903.522031] env[62585]: DEBUG nova.virt.hardware [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 903.522219] env[62585]: DEBUG nova.virt.hardware [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 903.522395] env[62585]: DEBUG nova.virt.hardware [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 903.522562] env[62585]: DEBUG nova.virt.hardware [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 903.522740] env[62585]: DEBUG nova.virt.hardware [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 903.523551] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ec7c52-8d78-46c3-bfcc-afe2f67537dd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.533169] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ce9fde-ae9a-486a-b77c-0d2e3967f66a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.548296] env[62585]: DEBUG nova.objects.base [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62585) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 903.549426] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994f786e-0497-441a-96bd-2c2bc9e356d7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.572942] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1900f85c-d059-44f8-9017-0bb537f50a91 {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.575469] env[62585]: INFO nova.compute.manager [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Took 19.66 seconds to build instance. [ 903.580803] env[62585]: DEBUG oslo_vmware.api [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 903.580803] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52963c8f-9cb7-c194-bda6-b7227e70f0bf" [ 903.580803] env[62585]: _type = "Task" [ 903.580803] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.597247] env[62585]: DEBUG oslo_vmware.api [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52963c8f-9cb7-c194-bda6-b7227e70f0bf, 'name': SearchDatastore_Task, 'duration_secs': 0.013378} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.600674] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.601235] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384881, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.639909} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.601534] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] fcbbc06c-71fa-4891-8bfc-0de746b9e622/fcbbc06c-71fa-4891-8bfc-0de746b9e622.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 903.601761] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 903.602029] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6568e54a-f0af-4fbc-9d1f-50ba759340c9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.612778] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 903.612778] env[62585]: value = "task-1384884" [ 903.612778] env[62585]: _type = "Task" [ 903.612778] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.622761] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384884, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.798038] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.338s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.800717] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.996s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.800918] env[62585]: DEBUG nova.objects.instance [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lazy-loading 'resources' on Instance uuid f1bfef38-b6d0-40d0-8e60-310f8a75dd78 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 903.817507] env[62585]: DEBUG oslo_concurrency.lockutils [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "70ac6289-2f14-4fb0-a811-97d76cafc532" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.834393] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384883, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.010671] env[62585]: DEBUG oslo_vmware.api [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384880, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.044064] env[62585]: DEBUG nova.network.neutron [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Successfully updated port: 795b88c3-09ab-44aa-bb6f-8bd339ffc0de {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 904.078304] env[62585]: DEBUG oslo_concurrency.lockutils [None req-197d8983-06d1-4949-87c0-ff50d0bf3500 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "4b080cc3-e1cc-4b64-9926-c37b891444f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.179s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.123500] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384884, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.223075] env[62585]: DEBUG nova.network.neutron [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Updated VIF entry in instance network info cache for port 969d2b65-14d8-4ce4-b801-2bdc9e536e20. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 904.223461] env[62585]: DEBUG nova.network.neutron [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Updating instance_info_cache with network_info: [{"id": "969d2b65-14d8-4ce4-b801-2bdc9e536e20", "address": "fa:16:3e:df:d6:69", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": null, "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap969d2b65-14", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.304577] env[62585]: DEBUG nova.objects.instance [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lazy-loading 'numa_topology' on Instance uuid f1bfef38-b6d0-40d0-8e60-310f8a75dd78 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 904.309765] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cfdd2f22-b689-4754-b20a-3189a11ab9e3 tempest-AttachVolumeShelveTestJSON-1622162023 
tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "6057e13b-71df-458d-b6ed-c139a8c57836" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 27.593s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.310675] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "6057e13b-71df-458d-b6ed-c139a8c57836" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 3.119s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.310765] env[62585]: INFO nova.compute.manager [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Unshelving [ 904.335738] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384883, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.422254] env[62585]: DEBUG nova.compute.manager [req-082fa0a5-3bf5-4501-9373-3be8f9fffcf2 req-78d0b84d-fe15-4a74-a8e8-1088bbaa3371 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Received event network-changed-4b5af1c5-20c1-446c-aad5-023ac683f7e8 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 904.422404] env[62585]: DEBUG nova.compute.manager [req-082fa0a5-3bf5-4501-9373-3be8f9fffcf2 req-78d0b84d-fe15-4a74-a8e8-1088bbaa3371 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Refreshing instance network info cache due to event network-changed-4b5af1c5-20c1-446c-aad5-023ac683f7e8. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 904.422626] env[62585]: DEBUG oslo_concurrency.lockutils [req-082fa0a5-3bf5-4501-9373-3be8f9fffcf2 req-78d0b84d-fe15-4a74-a8e8-1088bbaa3371 service nova] Acquiring lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.422775] env[62585]: DEBUG oslo_concurrency.lockutils [req-082fa0a5-3bf5-4501-9373-3be8f9fffcf2 req-78d0b84d-fe15-4a74-a8e8-1088bbaa3371 service nova] Acquired lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.423076] env[62585]: DEBUG nova.network.neutron [req-082fa0a5-3bf5-4501-9373-3be8f9fffcf2 req-78d0b84d-fe15-4a74-a8e8-1088bbaa3371 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Refreshing network info cache for port 4b5af1c5-20c1-446c-aad5-023ac683f7e8 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 904.514894] env[62585]: DEBUG oslo_vmware.api [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384880, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.548807] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "refresh_cache-01941b61-1960-4360-9dd0-513d5597bc70" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.549057] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "refresh_cache-01941b61-1960-4360-9dd0-513d5597bc70" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.549219] env[62585]: DEBUG nova.network.neutron [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 904.627549] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384884, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.726695] env[62585]: DEBUG oslo_concurrency.lockutils [req-ac13b710-c1d1-4751-aba3-134290dc2520 req-ef92b2a2-49bb-4fd6-a641-efa646938f25 service nova] Releasing lock "refresh_cache-f1bfef38-b6d0-40d0-8e60-310f8a75dd78" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.810496] env[62585]: DEBUG nova.objects.base [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62585) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 904.840517] env[62585]: DEBUG oslo_vmware.api [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384883, 'name': PowerOnVM_Task, 'duration_secs': 1.146533} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.841261] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.841762] env[62585]: INFO nova.compute.manager [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Took 8.88 seconds to spawn the instance on the hypervisor. 
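The recurring "Task: {'id': task-..., 'name': ...} progress is N%." lines followed by "completed successfully" come from a poll-until-done loop around the vCenter task objects (the wait_for_task/_poll_task calls shown in the traces above). A minimal, self-contained sketch of that polling pattern is below; it uses a hypothetical get_task_info() stub in place of the real vSphere/oslo.vmware API, so it only illustrates the control flow that produces these progress lines, not the actual implementation.

import time

# Hypothetical stand-in for querying a vSphere task; NOT the oslo.vmware API.
# It simulates a task that reaches 100% after a couple of polls.
_PROGRESS = {"task-1384880": 0}

def get_task_info(task_id):
    _PROGRESS[task_id] = min(_PROGRESS[task_id] + 50, 100)
    state = "success" if _PROGRESS[task_id] == 100 else "running"
    return {"state": state, "progress": _PROGRESS[task_id]}

def wait_for_task(task_id, poll_interval=0.5, timeout=300):
    """Poll a task until it finishes, mirroring the progress log lines above."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        print(f"Task: {{'id': {task_id!r}}} progress is {info['progress']}%.")
        if info["state"] == "success":
            print(f"Task {task_id} completed successfully.")
            return info
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")

if __name__ == "__main__":
    wait_for_task("task-1384880")

In the real service the polling cadence is configurable (oslo.vmware's task_poll_interval), and each poll is what emits one of the "_poll_task ... progress is N%" entries seen throughout this log; the stub above only reproduces that loop shape.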
[ 904.841963] env[62585]: DEBUG nova.compute.manager [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 904.842870] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de74a841-d0e8-46d2-b41a-ec9cf742c83f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.882800] env[62585]: DEBUG oslo_concurrency.lockutils [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "70ac6289-2f14-4fb0-a811-97d76cafc532" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.883086] env[62585]: DEBUG oslo_concurrency.lockutils [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "70ac6289-2f14-4fb0-a811-97d76cafc532" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.883350] env[62585]: INFO nova.compute.manager [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Attaching volume aa1719fc-04bb-46a3-84c3-9f3625281f3d to /dev/sdb [ 904.922778] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018a5249-21a6-4aa5-8144-34e682c5d484 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.939711] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f895aefc-e83f-4d61-a7b1-b873ee64a683 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.959275] env[62585]: DEBUG nova.virt.block_device [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Updating existing volume attachment record: ad24752b-2da8-4fae-bf82-6ed5ee3f28cd {{(pid=62585) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 905.013486] env[62585]: DEBUG oslo_vmware.api [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384880, 'name': ReconfigVM_Task, 'duration_secs': 2.077921} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.015448] env[62585]: DEBUG oslo_concurrency.lockutils [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.015699] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Reconfigured VM to attach interface {{(pid=62585) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 905.023025] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3610e501-7559-488d-b477-c8c16fd68392 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.035312] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adde4b4-0df6-4024-81e4-4b6783d63dae {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.078077] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ecfce6-bca9-40e2-a27a-a28f598815d4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.088969] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa38ec93-c8f9-4316-bb23-35ea37fa8153 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.109909] env[62585]: DEBUG nova.compute.provider_tree [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.124708] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384884, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.157701} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.125018] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 905.129024] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b254ea-1863-4e35-873c-2db1e84db1bc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.152279] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] fcbbc06c-71fa-4891-8bfc-0de746b9e622/fcbbc06c-71fa-4891-8bfc-0de746b9e622.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 905.153656] env[62585]: DEBUG nova.network.neutron [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 905.155934] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77a83769-8334-4843-805b-d4e2feb86e0a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.179728] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 905.179728] env[62585]: value = "task-1384886" [ 905.179728] env[62585]: _type = "Task" [ 905.179728] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.188867] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384886, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.264961] env[62585]: DEBUG nova.network.neutron [req-082fa0a5-3bf5-4501-9373-3be8f9fffcf2 req-78d0b84d-fe15-4a74-a8e8-1088bbaa3371 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updated VIF entry in instance network info cache for port 4b5af1c5-20c1-446c-aad5-023ac683f7e8. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 905.265688] env[62585]: DEBUG nova.network.neutron [req-082fa0a5-3bf5-4501-9373-3be8f9fffcf2 req-78d0b84d-fe15-4a74-a8e8-1088bbaa3371 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updating instance_info_cache with network_info: [{"id": "840822b3-e947-451f-90bf-03eafebebf95", "address": "fa:16:3e:f8:2a:80", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap840822b3-e9", "ovs_interfaceid": "840822b3-e947-451f-90bf-03eafebebf95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4b5af1c5-20c1-446c-aad5-023ac683f7e8", "address": "fa:16:3e:21:aa:89", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b5af1c5-20", "ovs_interfaceid": "4b5af1c5-20c1-446c-aad5-023ac683f7e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.336494] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.363851] env[62585]: INFO nova.compute.manager [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Took 
21.15 seconds to build instance. [ 905.525943] env[62585]: DEBUG oslo_concurrency.lockutils [None req-340a7d72-1441-4a1e-b02a-9a92eaf9f81f tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "interface-a634a80e-d90a-4ce3-8233-75657a7754be-4b5af1c5-20c1-446c-aad5-023ac683f7e8" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.420s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.528386] env[62585]: DEBUG nova.network.neutron [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Updating instance_info_cache with network_info: [{"id": "795b88c3-09ab-44aa-bb6f-8bd339ffc0de", "address": "fa:16:3e:41:63:46", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap795b88c3-09", "ovs_interfaceid": "795b88c3-09ab-44aa-bb6f-8bd339ffc0de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.616133] env[62585]: DEBUG nova.scheduler.client.report [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 905.690731] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384886, 'name': ReconfigVM_Task, 'duration_secs': 0.487824} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.691343] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Reconfigured VM instance instance-00000056 to attach disk [datastore1] fcbbc06c-71fa-4891-8bfc-0de746b9e622/fcbbc06c-71fa-4891-8bfc-0de746b9e622.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 905.691757] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-065e61a0-01f4-4e59-a2b2-1b25f6e28c2a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.701016] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 905.701016] env[62585]: value = "task-1384889" [ 905.701016] env[62585]: _type = "Task" [ 905.701016] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.710874] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384889, 'name': Rename_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.770379] env[62585]: DEBUG oslo_concurrency.lockutils [req-082fa0a5-3bf5-4501-9373-3be8f9fffcf2 req-78d0b84d-fe15-4a74-a8e8-1088bbaa3371 service nova] Releasing lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.867125] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9ea12db5-c7ca-4c5d-8be0-2429dd8cb4b1 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "2cf85b78-df04-40d0-a7db-5e8979574d0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.666s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.033126] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "refresh_cache-01941b61-1960-4360-9dd0-513d5597bc70" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.033126] env[62585]: DEBUG nova.compute.manager [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Instance network_info: |[{"id": "795b88c3-09ab-44aa-bb6f-8bd339ffc0de", "address": "fa:16:3e:41:63:46", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap795b88c3-09", "ovs_interfaceid": "795b88c3-09ab-44aa-bb6f-8bd339ffc0de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 906.033631] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:63:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40859343-2baa-45fd-88e3-ebf8aaed2b19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '795b88c3-09ab-44aa-bb6f-8bd339ffc0de', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 906.041461] env[62585]: DEBUG oslo.service.loopingcall [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.042251] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 906.042473] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ca43ee8-0938-4db4-8be3-4b5259fe809e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.065633] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 906.065633] env[62585]: value = "task-1384890" [ 906.065633] env[62585]: _type = "Task" [ 906.065633] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.075858] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384890, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.121426] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.321s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.124541] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.523s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.219649] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384889, 'name': Rename_Task, 'duration_secs': 0.273034} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.220103] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 906.220946] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ba2a225-65b0-436d-badb-a0bd1bf8a80a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.229408] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 906.229408] env[62585]: value = "task-1384891" [ 906.229408] env[62585]: _type = "Task" [ 906.229408] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.240869] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384891, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.512950] env[62585]: DEBUG nova.compute.manager [req-bf877394-e2c0-4591-acfc-777b3cf2e9c9 req-90d4f61b-6f91-41d5-bdd9-bf1df58e26a9 service nova] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Received event network-vif-plugged-795b88c3-09ab-44aa-bb6f-8bd339ffc0de {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 906.514273] env[62585]: DEBUG oslo_concurrency.lockutils [req-bf877394-e2c0-4591-acfc-777b3cf2e9c9 req-90d4f61b-6f91-41d5-bdd9-bf1df58e26a9 service nova] Acquiring lock "01941b61-1960-4360-9dd0-513d5597bc70-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.514772] env[62585]: DEBUG oslo_concurrency.lockutils [req-bf877394-e2c0-4591-acfc-777b3cf2e9c9 req-90d4f61b-6f91-41d5-bdd9-bf1df58e26a9 service nova] Lock "01941b61-1960-4360-9dd0-513d5597bc70-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.515142] env[62585]: DEBUG oslo_concurrency.lockutils [req-bf877394-e2c0-4591-acfc-777b3cf2e9c9 req-90d4f61b-6f91-41d5-bdd9-bf1df58e26a9 service nova] Lock "01941b61-1960-4360-9dd0-513d5597bc70-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.515481] env[62585]: DEBUG nova.compute.manager [req-bf877394-e2c0-4591-acfc-777b3cf2e9c9 req-90d4f61b-6f91-41d5-bdd9-bf1df58e26a9 service nova] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] No waiting events found dispatching network-vif-plugged-795b88c3-09ab-44aa-bb6f-8bd339ffc0de {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 906.515804] env[62585]: WARNING nova.compute.manager [req-bf877394-e2c0-4591-acfc-777b3cf2e9c9 req-90d4f61b-6f91-41d5-bdd9-bf1df58e26a9 service nova] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Received unexpected event network-vif-plugged-795b88c3-09ab-44aa-bb6f-8bd339ffc0de for instance with vm_state building and task_state spawning. [ 906.516151] env[62585]: DEBUG nova.compute.manager [req-bf877394-e2c0-4591-acfc-777b3cf2e9c9 req-90d4f61b-6f91-41d5-bdd9-bf1df58e26a9 service nova] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Received event network-changed-795b88c3-09ab-44aa-bb6f-8bd339ffc0de {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 906.516449] env[62585]: DEBUG nova.compute.manager [req-bf877394-e2c0-4591-acfc-777b3cf2e9c9 req-90d4f61b-6f91-41d5-bdd9-bf1df58e26a9 service nova] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Refreshing instance network info cache due to event network-changed-795b88c3-09ab-44aa-bb6f-8bd339ffc0de. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 906.516751] env[62585]: DEBUG oslo_concurrency.lockutils [req-bf877394-e2c0-4591-acfc-777b3cf2e9c9 req-90d4f61b-6f91-41d5-bdd9-bf1df58e26a9 service nova] Acquiring lock "refresh_cache-01941b61-1960-4360-9dd0-513d5597bc70" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.516988] env[62585]: DEBUG oslo_concurrency.lockutils [req-bf877394-e2c0-4591-acfc-777b3cf2e9c9 req-90d4f61b-6f91-41d5-bdd9-bf1df58e26a9 service nova] Acquired lock "refresh_cache-01941b61-1960-4360-9dd0-513d5597bc70" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.517323] env[62585]: DEBUG nova.network.neutron [req-bf877394-e2c0-4591-acfc-777b3cf2e9c9 req-90d4f61b-6f91-41d5-bdd9-bf1df58e26a9 service nova] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Refreshing network info cache for port 795b88c3-09ab-44aa-bb6f-8bd339ffc0de {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 906.578158] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384890, 'name': CreateVM_Task} progress is 25%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.636195] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b49e9b37-1744-4d2d-96d8-89d01986d320 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.282s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.638802] env[62585]: DEBUG oslo_concurrency.lockutils [None req-27c10dbe-7c65-4830-b4fe-ab719aabf4f2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 4.914s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.638802] env[62585]: DEBUG oslo_concurrency.lockutils [None req-27c10dbe-7c65-4830-b4fe-ab719aabf4f2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.638802] env[62585]: DEBUG oslo_concurrency.lockutils [None req-27c10dbe-7c65-4830-b4fe-ab719aabf4f2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.638802] env[62585]: DEBUG oslo_concurrency.lockutils [None req-27c10dbe-7c65-4830-b4fe-ab719aabf4f2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.641781] env[62585]: INFO nova.compute.manager [None req-27c10dbe-7c65-4830-b4fe-ab719aabf4f2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Terminating instance [ 906.644834] env[62585]: DEBUG nova.compute.manager [None req-27c10dbe-7c65-4830-b4fe-ab719aabf4f2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 906.645120] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-27c10dbe-7c65-4830-b4fe-ab719aabf4f2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 906.645450] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1369b2b6-ac81-4468-b152-d2b7f5a5287d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.657999] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43cb423c-a9fa-4208-84de-8fe40f38b139 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.701727] env[62585]: WARNING nova.virt.vmwareapi.vmops [None req-27c10dbe-7c65-4830-b4fe-ab719aabf4f2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f1bfef38-b6d0-40d0-8e60-310f8a75dd78 could not be found. [ 906.702295] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-27c10dbe-7c65-4830-b4fe-ab719aabf4f2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 906.702505] env[62585]: INFO nova.compute.manager [None req-27c10dbe-7c65-4830-b4fe-ab719aabf4f2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Took 0.06 seconds to destroy the instance on the hypervisor. [ 906.702794] env[62585]: DEBUG oslo.service.loopingcall [None req-27c10dbe-7c65-4830-b4fe-ab719aabf4f2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.706179] env[62585]: DEBUG nova.compute.manager [-] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 906.706179] env[62585]: DEBUG nova.network.neutron [-] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 906.743009] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384891, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.885606] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0f8b40-af8f-4987-aba9-a0997e922b55 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.894907] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7be38fb-b8d8-4de0-9fe9-a4f81b7b6f6a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.929354] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed81f5b3-6694-4606-9bc2-c5247198ea1d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.939767] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bbe7163-0c07-4d5a-85e8-cbe820c52dfd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.957496] env[62585]: DEBUG nova.compute.provider_tree [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.992978] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "interface-a634a80e-d90a-4ce3-8233-75657a7754be-4b5af1c5-20c1-446c-aad5-023ac683f7e8" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.993692] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "interface-a634a80e-d90a-4ce3-8233-75657a7754be-4b5af1c5-20c1-446c-aad5-023ac683f7e8" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.076874] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384890, 'name': CreateVM_Task, 'duration_secs': 0.81265} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.077181] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 907.077955] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.078154] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.078515] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 907.078811] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf2abd00-93db-450a-9af0-645e633ad3ba {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.084190] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 907.084190] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522bd9fd-8239-ea1f-a5eb-b1ff3eb38388" [ 907.084190] env[62585]: _type = "Task" [ 907.084190] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.094669] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522bd9fd-8239-ea1f-a5eb-b1ff3eb38388, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.243794] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384891, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.313024] env[62585]: DEBUG nova.network.neutron [req-bf877394-e2c0-4591-acfc-777b3cf2e9c9 req-90d4f61b-6f91-41d5-bdd9-bf1df58e26a9 service nova] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Updated VIF entry in instance network info cache for port 795b88c3-09ab-44aa-bb6f-8bd339ffc0de. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 907.313024] env[62585]: DEBUG nova.network.neutron [req-bf877394-e2c0-4591-acfc-777b3cf2e9c9 req-90d4f61b-6f91-41d5-bdd9-bf1df58e26a9 service nova] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Updating instance_info_cache with network_info: [{"id": "795b88c3-09ab-44aa-bb6f-8bd339ffc0de", "address": "fa:16:3e:41:63:46", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap795b88c3-09", "ovs_interfaceid": "795b88c3-09ab-44aa-bb6f-8bd339ffc0de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.463127] env[62585]: DEBUG nova.scheduler.client.report [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 907.497253] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.497253] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.498175] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08238873-56a5-40ab-943e-4dee63d4604a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.518950] env[62585]: DEBUG nova.network.neutron [-] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Updating instance_info_cache with network_info: [] {{(pid=62585) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.521596] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67dc9e80-54ab-40ff-a361-fba87b002e14 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.551635] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Reconfiguring VM to detach interface {{(pid=62585) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 907.553116] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3643b239-f3cb-47ea-a26b-4d6a6ec32e20 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.573489] env[62585]: DEBUG oslo_vmware.api [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 907.573489] env[62585]: value = "task-1384892" [ 907.573489] env[62585]: _type = "Task" [ 907.573489] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.583256] env[62585]: DEBUG oslo_vmware.api [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384892, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.593424] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522bd9fd-8239-ea1f-a5eb-b1ff3eb38388, 'name': SearchDatastore_Task, 'duration_secs': 0.017297} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.593724] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.593967] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 907.594232] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.594385] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.594565] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 907.594826] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90a1c77f-d9fb-49a9-be0e-4aaaf9458b0e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.604039] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 907.604267] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 907.605055] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37954b46-df72-4dbe-8629-91ab5d65107b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.611461] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 907.611461] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5267ead1-017c-b52b-a375-a0eeae84814a" [ 907.611461] env[62585]: _type = "Task" [ 907.611461] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.620980] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5267ead1-017c-b52b-a375-a0eeae84814a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.743526] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384891, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.814440] env[62585]: DEBUG oslo_concurrency.lockutils [req-bf877394-e2c0-4591-acfc-777b3cf2e9c9 req-90d4f61b-6f91-41d5-bdd9-bf1df58e26a9 service nova] Releasing lock "refresh_cache-01941b61-1960-4360-9dd0-513d5597bc70" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.023735] env[62585]: INFO nova.compute.manager [-] [instance: f1bfef38-b6d0-40d0-8e60-310f8a75dd78] Took 1.32 seconds to deallocate network for instance. [ 908.085800] env[62585]: DEBUG oslo_vmware.api [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384892, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.123400] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5267ead1-017c-b52b-a375-a0eeae84814a, 'name': SearchDatastore_Task, 'duration_secs': 0.014687} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.124220] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d874b8e-c69f-4737-8705-3a2e863faa27 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.130743] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 908.130743] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52899087-9aa0-6e59-6084-79c171ec5d8c" [ 908.130743] env[62585]: _type = "Task" [ 908.130743] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.142216] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52899087-9aa0-6e59-6084-79c171ec5d8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.243702] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384891, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.475683] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.351s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.479216] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.142s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.479216] env[62585]: DEBUG nova.objects.instance [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lazy-loading 'pci_requests' on Instance uuid 6057e13b-71df-458d-b6ed-c139a8c57836 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 908.525430] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "abf4a205-fcee-46e4-85b6-10a452cc0312" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.525706] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 
tempest-ServersNegativeTestJSON-310901189-project-member] Lock "abf4a205-fcee-46e4-85b6-10a452cc0312" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.525893] env[62585]: INFO nova.compute.manager [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Shelving [ 908.587117] env[62585]: DEBUG oslo_vmware.api [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384892, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.641494] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52899087-9aa0-6e59-6084-79c171ec5d8c, 'name': SearchDatastore_Task, 'duration_secs': 0.012845} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.641776] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.642129] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 01941b61-1960-4360-9dd0-513d5597bc70/01941b61-1960-4360-9dd0-513d5597bc70.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 908.642526] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ffdb9dd-7759-49f9-90b8-d3a30e136522 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.653084] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 908.653084] env[62585]: value = "task-1384893" [ 908.653084] env[62585]: _type = "Task" [ 908.653084] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.662370] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384893, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.745927] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384891, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.984199] env[62585]: DEBUG nova.objects.instance [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lazy-loading 'numa_topology' on Instance uuid 6057e13b-71df-458d-b6ed-c139a8c57836 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 909.033816] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 909.034158] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0423bd40-412a-4419-8bf1-87026ff163e4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.039694] env[62585]: INFO nova.scheduler.client.report [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Deleted allocation for migration fd35d3e7-abab-46e9-98a6-bb0a3cbf68a6 [ 909.050450] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 909.050450] env[62585]: value = "task-1384894" [ 909.050450] env[62585]: _type = "Task" [ 909.050450] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.051173] env[62585]: DEBUG oslo_concurrency.lockutils [None req-27c10dbe-7c65-4830-b4fe-ab719aabf4f2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "f1bfef38-b6d0-40d0-8e60-310f8a75dd78" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.413s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.064440] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384894, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.088519] env[62585]: DEBUG oslo_vmware.api [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384892, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.168640] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384893, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.250431] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384891, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.488952] env[62585]: INFO nova.compute.claims [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 909.552650] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4dd8fdc6-5447-4090-9aa7-6de0239c8155 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.909s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.568192] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384894, 'name': PowerOffVM_Task, 'duration_secs': 0.315936} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.568192] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 909.568465] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eda933e-04e2-4a2c-b435-ac500b31f2a2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.593585] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db191d9-98f9-4d70-b777-e9336b042d19 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.604312] env[62585]: DEBUG oslo_vmware.api [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384892, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.669347] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384893, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.702948} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.669638] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 01941b61-1960-4360-9dd0-513d5597bc70/01941b61-1960-4360-9dd0-513d5597bc70.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 909.669944] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 909.670291] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d2f124cc-0eaf-417a-aa13-03b6671d27fc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.678614] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 909.678614] env[62585]: value = "task-1384896" [ 909.678614] env[62585]: _type = "Task" [ 909.678614] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.689432] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384896, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.745796] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384891, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.101031] env[62585]: DEBUG oslo_vmware.api [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384892, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.114521] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Creating Snapshot of the VM instance {{(pid=62585) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 910.114897] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-452b64d6-7971-4216-b099-0c0c043bbeaa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.126546] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 910.126546] env[62585]: value = "task-1384897" [ 910.126546] env[62585]: _type = "Task" [ 910.126546] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.137802] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384897, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.189303] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384896, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.213396} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.189612] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 910.190476] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642472ad-2271-47bf-ac21-bdd031bb5722 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.217363] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 01941b61-1960-4360-9dd0-513d5597bc70/01941b61-1960-4360-9dd0-513d5597bc70.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 910.219634] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e4c5b56-5cbf-494c-8149-7bc09ad0b798 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.237195] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "3abb84ea-b613-4956-a64f-c4ad230343c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.237441] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "3abb84ea-b613-4956-a64f-c4ad230343c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.245613] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 910.245613] env[62585]: value = "task-1384898" [ 910.245613] env[62585]: _type = "Task" [ 910.245613] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.252713] env[62585]: DEBUG oslo_vmware.api [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384891, 'name': PowerOnVM_Task, 'duration_secs': 3.763395} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.253376] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 910.253582] env[62585]: INFO nova.compute.manager [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Took 11.95 seconds to spawn the instance on the hypervisor. [ 910.253766] env[62585]: DEBUG nova.compute.manager [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 910.254532] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d4e373-248d-4cf6-8112-7a6a95ce567f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.260392] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384898, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.303805] env[62585]: DEBUG oslo_vmware.rw_handles [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52aad523-4213-ff83-4871-103d8afe2011/disk-0.vmdk. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 910.304794] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034eb18a-dc24-43f2-aeaf-5da024faaf71 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.312697] env[62585]: DEBUG oslo_vmware.rw_handles [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52aad523-4213-ff83-4871-103d8afe2011/disk-0.vmdk is in state: ready. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 910.312976] env[62585]: ERROR oslo_vmware.rw_handles [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52aad523-4213-ff83-4871-103d8afe2011/disk-0.vmdk due to incomplete transfer. 
[ 910.314014] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5e7f314f-7232-41c9-9754-fbaeb6909c70 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.324019] env[62585]: DEBUG oslo_vmware.rw_handles [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52aad523-4213-ff83-4871-103d8afe2011/disk-0.vmdk. {{(pid=62585) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 910.324266] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Uploaded image 310691a0-fca0-4934-a7c2-2e7b96be6e6c to the Glance image server {{(pid=62585) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 910.326785] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Destroying the VM {{(pid=62585) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 910.327073] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f6243cb9-c2ef-4123-a0c4-9d32f58cee1f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.334764] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 910.334764] env[62585]: value = "task-1384899" [ 910.334764] env[62585]: _type = "Task" [ 910.334764] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.344406] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384899, 'name': Destroy_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.602261] env[62585]: DEBUG oslo_vmware.api [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384892, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.639790] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384897, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.674539] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5cb5df-34dd-4aea-a7d0-d9b6f7fe0b28 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.683338] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3600ee8c-160f-4f2f-890f-f4c391483f36 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.715560] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e316b7-bf0d-4473-afb0-01e1f25ba6f6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.723454] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecbaf45-b94f-4a4d-bd1f-794f05b04e97 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.739159] env[62585]: DEBUG nova.compute.provider_tree [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.744305] env[62585]: DEBUG nova.compute.manager [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 910.756763] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.778516] env[62585]: INFO nova.compute.manager [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Took 26.27 seconds to build instance. [ 910.845141] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384899, 'name': Destroy_Task, 'duration_secs': 0.463951} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.845437] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Destroyed the VM [ 910.845695] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Deleting Snapshot of the VM instance {{(pid=62585) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 910.846023] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b085946d-2a05-4e24-8c6d-c5d15728c6c3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.856536] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 910.856536] env[62585]: value = "task-1384900" [ 910.856536] env[62585]: _type = "Task" [ 910.856536] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.865981] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384900, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.104516] env[62585]: DEBUG oslo_vmware.api [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384892, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.140939] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384897, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.242586] env[62585]: DEBUG nova.scheduler.client.report [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 911.259260] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384898, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 911.265088] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 911.279432] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d483579d-a365-4674-810f-31b14ccc687a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "fcbbc06c-71fa-4891-8bfc-0de746b9e622" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.793s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 911.368491] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384900, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 911.529582] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Volume attach. Driver type: vmdk {{(pid=62585) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}}
[ 911.531082] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294040', 'volume_id': 'aa1719fc-04bb-46a3-84c3-9f3625281f3d', 'name': 'volume-aa1719fc-04bb-46a3-84c3-9f3625281f3d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '70ac6289-2f14-4fb0-a811-97d76cafc532', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa1719fc-04bb-46a3-84c3-9f3625281f3d', 'serial': 'aa1719fc-04bb-46a3-84c3-9f3625281f3d'} {{(pid=62585) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}}
[ 911.531082] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db9df999-f701-4c6a-8da6-ce98be4c477c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 911.553509] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074b2006-a1bb-4c96-b276-ee3f86892dc3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 911.582286] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] volume-aa1719fc-04bb-46a3-84c3-9f3625281f3d/volume-aa1719fc-04bb-46a3-84c3-9f3625281f3d.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 911.582843] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81c88c14-2bf0-4352-a6a4-7fe870be73db {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 911.599480] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 911.599736] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 911.599982] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 911.600194] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 911.600367] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 911.602917] env[62585]: INFO nova.compute.manager [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Terminating instance
[ 911.605676] env[62585]: DEBUG oslo_vmware.api [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for the task: (returnval){
[ 911.605676] env[62585]: value = "task-1384901"
[ 911.605676] env[62585]: _type = "Task"
[ 911.605676] env[62585]: } to complete.
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.609226] env[62585]: DEBUG nova.compute.manager [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 911.609445] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 911.609800] env[62585]: DEBUG oslo_vmware.api [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384892, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.610513] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63eb813d-587c-4c8e-b7ac-dbca2f28c651 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.623460] env[62585]: DEBUG oslo_vmware.api [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384901, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.623753] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 911.624063] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ac8ec78-ed32-4293-9f40-6d56c9f8b2cb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.634777] env[62585]: DEBUG oslo_vmware.api [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 911.634777] env[62585]: value = "task-1384902" [ 911.634777] env[62585]: _type = "Task" [ 911.634777] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.642367] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384897, 'name': CreateSnapshot_Task, 'duration_secs': 1.268851} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.642367] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Created Snapshot of the VM instance {{(pid=62585) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 911.643179] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dda830c-4285-42ee-9927-65511f63f1de {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.650243] env[62585]: DEBUG oslo_vmware.api [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384902, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.747846] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.269s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.750318] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.485s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.751917] env[62585]: INFO nova.compute.claims [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 911.764791] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.788024] env[62585]: INFO nova.network.neutron [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updating port bc3d19ab-ba98-4935-9e08-61c5df21be43 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 911.869993] env[62585]: DEBUG oslo_vmware.api [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384900, 'name': RemoveSnapshot_Task, 'duration_secs': 0.712822} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.870436] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Deleted Snapshot of the VM instance {{(pid=62585) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 911.870621] env[62585]: INFO nova.compute.manager [None req-c24a451e-a3b8-4d46-90f2-09a624598add tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Took 18.33 seconds to snapshot the instance on the hypervisor. [ 911.881032] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "b6186aef-8f4c-409a-83aa-1548545ea7c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.881373] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "b6186aef-8f4c-409a-83aa-1548545ea7c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.111152] env[62585]: DEBUG oslo_vmware.api [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384892, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.122280] env[62585]: DEBUG oslo_vmware.api [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384901, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.150538] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "54f542b5-3aba-49d6-a487-62714416b86f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.150538] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "54f542b5-3aba-49d6-a487-62714416b86f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.157508] env[62585]: DEBUG oslo_vmware.api [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384902, 'name': PowerOffVM_Task, 'duration_secs': 0.334848} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.157508] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 912.157508] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 912.157508] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87987d56-fded-4080-a324-3ac1d726c9cd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.169648] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Creating linked-clone VM from snapshot {{(pid=62585) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 912.171278] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e7388ccc-acbd-4ff5-987c-660512a3d0f9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.184339] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 912.184339] env[62585]: value = "task-1384904" [ 912.184339] env[62585]: _type = "Task" [ 912.184339] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.194957] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384904, 'name': CloneVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.263159] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384898, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.383780] env[62585]: DEBUG nova.compute.manager [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 912.613110] env[62585]: DEBUG oslo_vmware.api [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384892, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.622538] env[62585]: DEBUG oslo_vmware.api [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384901, 'name': ReconfigVM_Task, 'duration_secs': 0.608568} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.622819] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Reconfigured VM instance instance-0000004c to attach disk [datastore1] volume-aa1719fc-04bb-46a3-84c3-9f3625281f3d/volume-aa1719fc-04bb-46a3-84c3-9f3625281f3d.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 912.631363] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b1cb7ce-cbac-4275-b3a7-abf15ecf83fd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.645960] env[62585]: DEBUG oslo_vmware.api [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for the task: (returnval){ [ 912.645960] env[62585]: value = "task-1384905" [ 912.645960] env[62585]: _type = "Task" [ 912.645960] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.655112] env[62585]: DEBUG nova.compute.manager [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 912.664521] env[62585]: DEBUG oslo_vmware.api [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384905, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.697422] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384904, 'name': CloneVM_Task} progress is 94%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.728981] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 912.730297] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 912.730297] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Deleting the datastore file [datastore2] d96a04d7-b07f-439d-aafa-09dc70a4d1a7 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 912.730297] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc482b5e-98b3-40a5-9fba-9b1c82cdc948 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.740075] env[62585]: DEBUG oslo_vmware.api [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 912.740075] env[62585]: value = "task-1384906" [ 912.740075] env[62585]: _type = "Task" [ 912.740075] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.749353] env[62585]: DEBUG oslo_vmware.api [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384906, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.761805] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384898, 'name': ReconfigVM_Task, 'duration_secs': 2.024818} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 912.762159] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 01941b61-1960-4360-9dd0-513d5597bc70/01941b61-1960-4360-9dd0-513d5597bc70.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 912.762893] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b40905f4-c9e7-44dd-bd38-17d022dfd584 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 912.773986] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){
[ 912.773986] env[62585]: value = "task-1384907"
[ 912.773986] env[62585]: _type = "Task"
[ 912.773986] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 912.790092] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384907, 'name': Rename_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 912.910766] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 912.993535] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ef7cec-c961-4719-abe6-5d9150093362 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 913.005398] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb094c4-5309-4e43-b525-ff82e5021b34 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 913.037523] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566e1c20-0a0b-48f2-b0b9-1f8e7a48127d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 913.045906] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f466bdc-895d-45bb-8e05-070c0a305aed {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 913.061028] env[62585]: DEBUG nova.compute.provider_tree [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 913.111612] env[62585]: DEBUG oslo_vmware.api [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384892, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 913.156461] env[62585]: DEBUG oslo_vmware.api [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384905, 'name': ReconfigVM_Task, 'duration_secs': 0.330256} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 913.156724] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294040', 'volume_id': 'aa1719fc-04bb-46a3-84c3-9f3625281f3d', 'name': 'volume-aa1719fc-04bb-46a3-84c3-9f3625281f3d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '70ac6289-2f14-4fb0-a811-97d76cafc532', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa1719fc-04bb-46a3-84c3-9f3625281f3d', 'serial': 'aa1719fc-04bb-46a3-84c3-9f3625281f3d'} {{(pid=62585) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}}
[ 913.183642] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 913.198311] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384904, 'name': CloneVM_Task} progress is 94%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 913.250791] env[62585]: DEBUG oslo_vmware.api [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.21913} completed successfully.
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.251149] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 913.251356] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 913.251600] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 913.251800] env[62585]: INFO nova.compute.manager [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Took 1.64 seconds to destroy the instance on the hypervisor. [ 913.252078] env[62585]: DEBUG oslo.service.loopingcall [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 913.252285] env[62585]: DEBUG nova.compute.manager [-] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 913.252371] env[62585]: DEBUG nova.network.neutron [-] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 913.286467] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384907, 'name': Rename_Task, 'duration_secs': 0.198226} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.286862] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 913.287048] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc97e2ee-e7d8-42b6-b1ed-cc3428f11eec {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.295588] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 913.295588] env[62585]: value = "task-1384908" [ 913.295588] env[62585]: _type = "Task" [ 913.295588] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.305379] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384908, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.434526] env[62585]: DEBUG nova.compute.manager [req-134ea6fe-a1c0-42ca-8bb4-cdf36ff2420e req-3a6519cd-812a-46f4-809f-1d25df90a09d service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Received event network-vif-plugged-bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 913.434804] env[62585]: DEBUG oslo_concurrency.lockutils [req-134ea6fe-a1c0-42ca-8bb4-cdf36ff2420e req-3a6519cd-812a-46f4-809f-1d25df90a09d service nova] Acquiring lock "6057e13b-71df-458d-b6ed-c139a8c57836-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.435051] env[62585]: DEBUG oslo_concurrency.lockutils [req-134ea6fe-a1c0-42ca-8bb4-cdf36ff2420e req-3a6519cd-812a-46f4-809f-1d25df90a09d service nova] Lock "6057e13b-71df-458d-b6ed-c139a8c57836-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.435229] env[62585]: DEBUG oslo_concurrency.lockutils [req-134ea6fe-a1c0-42ca-8bb4-cdf36ff2420e req-3a6519cd-812a-46f4-809f-1d25df90a09d service nova] Lock "6057e13b-71df-458d-b6ed-c139a8c57836-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.435395] env[62585]: DEBUG nova.compute.manager [req-134ea6fe-a1c0-42ca-8bb4-cdf36ff2420e req-3a6519cd-812a-46f4-809f-1d25df90a09d service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] No waiting events found dispatching network-vif-plugged-bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 913.435966] env[62585]: WARNING nova.compute.manager [req-134ea6fe-a1c0-42ca-8bb4-cdf36ff2420e 
req-3a6519cd-812a-46f4-809f-1d25df90a09d service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Received unexpected event network-vif-plugged-bc3d19ab-ba98-4935-9e08-61c5df21be43 for instance with vm_state shelved_offloaded and task_state spawning. [ 913.494020] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d09ece69-57be-400a-9a3f-41f63997963a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "4b080cc3-e1cc-4b64-9926-c37b891444f5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.494020] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d09ece69-57be-400a-9a3f-41f63997963a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "4b080cc3-e1cc-4b64-9926-c37b891444f5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.494020] env[62585]: DEBUG nova.compute.manager [None req-d09ece69-57be-400a-9a3f-41f63997963a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 913.494020] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9435744-350e-4727-aca6-59863e29293d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.505427] env[62585]: DEBUG nova.compute.manager [None req-d09ece69-57be-400a-9a3f-41f63997963a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62585) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 913.505427] env[62585]: DEBUG nova.objects.instance [None req-d09ece69-57be-400a-9a3f-41f63997963a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lazy-loading 'flavor' on Instance uuid 4b080cc3-e1cc-4b64-9926-c37b891444f5 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 913.526841] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.526841] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquired lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.526841] env[62585]: DEBUG nova.network.neutron [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 
tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 913.549538] env[62585]: DEBUG nova.compute.manager [req-8dc1315f-c49c-4636-8e1e-513336749106 req-1c97a36b-736b-4884-b633-4164ededabf3 service nova] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Received event network-vif-deleted-c92c2d1e-3117-42a1-a5d2-3de9eba6e107 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 913.549845] env[62585]: INFO nova.compute.manager [req-8dc1315f-c49c-4636-8e1e-513336749106 req-1c97a36b-736b-4884-b633-4164ededabf3 service nova] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Neutron deleted interface c92c2d1e-3117-42a1-a5d2-3de9eba6e107; detaching it from the instance and deleting it from the info cache [ 913.550097] env[62585]: DEBUG nova.network.neutron [req-8dc1315f-c49c-4636-8e1e-513336749106 req-1c97a36b-736b-4884-b633-4164ededabf3 service nova] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.565670] env[62585]: DEBUG nova.scheduler.client.report [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 913.611468] env[62585]: DEBUG oslo_vmware.api [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384892, 'name': ReconfigVM_Task, 'duration_secs': 5.825895} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.611798] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.612062] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Reconfigured VM to detach interface {{(pid=62585) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 913.696839] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384904, 'name': CloneVM_Task, 'duration_secs': 1.23369} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.697141] env[62585]: INFO nova.virt.vmwareapi.vmops [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Created linked-clone VM from snapshot [ 913.697950] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14716cbd-d7f4-408e-971c-2bc482ae0616 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.708517] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Uploading image 0a472987-1439-41cb-99c4-bd857ec93b02 {{(pid=62585) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 913.736197] env[62585]: DEBUG oslo_vmware.rw_handles [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 913.736197] env[62585]: value = "vm-294043" [ 913.736197] env[62585]: _type = "VirtualMachine" [ 913.736197] env[62585]: }. {{(pid=62585) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 913.736499] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-94ecb96f-54da-41df-875a-9c3f8fc1383e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.745612] env[62585]: DEBUG oslo_vmware.rw_handles [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lease: (returnval){ [ 913.745612] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52afbb8c-3d89-9ae2-65ce-066fa8ae82d3" [ 913.745612] env[62585]: _type = "HttpNfcLease" [ 913.745612] env[62585]: } obtained for exporting VM: (result){ [ 913.745612] env[62585]: value = "vm-294043" [ 913.745612] env[62585]: _type = "VirtualMachine" [ 913.745612] env[62585]: }. {{(pid=62585) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 913.745936] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the lease: (returnval){ [ 913.745936] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52afbb8c-3d89-9ae2-65ce-066fa8ae82d3" [ 913.745936] env[62585]: _type = "HttpNfcLease" [ 913.745936] env[62585]: } to be ready. {{(pid=62585) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 913.755173] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 913.755173] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52afbb8c-3d89-9ae2-65ce-066fa8ae82d3" [ 913.755173] env[62585]: _type = "HttpNfcLease" [ 913.755173] env[62585]: } is initializing. 
{{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 913.810118] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384908, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.013550] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d09ece69-57be-400a-9a3f-41f63997963a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.014992] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c72d4fd1-fc96-4f4f-89b9-ef956d2ae719 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.020191] env[62585]: DEBUG nova.network.neutron [-] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.023074] env[62585]: DEBUG oslo_vmware.api [None req-d09ece69-57be-400a-9a3f-41f63997963a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 914.023074] env[62585]: value = "task-1384910" [ 914.023074] env[62585]: _type = "Task" [ 914.023074] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.033028] env[62585]: DEBUG oslo_vmware.api [None req-d09ece69-57be-400a-9a3f-41f63997963a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384910, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.052732] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc3ff4fc-23af-4ff4-a7e1-2c1c1a7612b1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.063139] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b0de9c-26d9-4636-8062-bebb3df033fa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.077984] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.328s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.078504] env[62585]: DEBUG nova.compute.manager [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 914.081821] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.171s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.083301] env[62585]: INFO nova.compute.claims [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 914.108064] env[62585]: DEBUG nova.compute.manager [req-8dc1315f-c49c-4636-8e1e-513336749106 req-1c97a36b-736b-4884-b633-4164ededabf3 service nova] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Detach interface failed, port_id=c92c2d1e-3117-42a1-a5d2-3de9eba6e107, reason: Instance d96a04d7-b07f-439d-aafa-09dc70a4d1a7 could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 914.205153] env[62585]: DEBUG nova.objects.instance [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lazy-loading 'flavor' on Instance uuid 70ac6289-2f14-4fb0-a811-97d76cafc532 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 914.255579] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 914.255579] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52afbb8c-3d89-9ae2-65ce-066fa8ae82d3" [ 914.255579] env[62585]: _type = "HttpNfcLease" [ 914.255579] env[62585]: } is ready. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 914.255971] env[62585]: DEBUG oslo_vmware.rw_handles [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 914.255971] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52afbb8c-3d89-9ae2-65ce-066fa8ae82d3" [ 914.255971] env[62585]: _type = "HttpNfcLease" [ 914.255971] env[62585]: }. {{(pid=62585) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 914.256764] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0abd2244-2bc5-4c11-852e-efae1e71703f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.268845] env[62585]: DEBUG oslo_vmware.rw_handles [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b92888-f4a8-01f9-9ba9-020698fbbefc/disk-0.vmdk from lease info. 
{{(pid=62585) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 914.269231] env[62585]: DEBUG oslo_vmware.rw_handles [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b92888-f4a8-01f9-9ba9-020698fbbefc/disk-0.vmdk for reading. {{(pid=62585) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 914.350669] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384908, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.378275] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c32f4cbb-9677-4a20-8d7b-8ecbb8cdbe36 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.461872] env[62585]: DEBUG nova.network.neutron [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updating instance_info_cache with network_info: [{"id": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "address": "fa:16:3e:31:3b:9c", "network": {"id": "8a8daef6-7b2d-44f6-8f2a-5cdf4dfff449", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-240024676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9645866ca8f0433cae30cf5867244ca8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc3d19ab-ba", "ovs_interfaceid": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.527020] env[62585]: INFO nova.compute.manager [-] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Took 1.27 seconds to deallocate network for instance. [ 914.537754] env[62585]: DEBUG oslo_vmware.api [None req-d09ece69-57be-400a-9a3f-41f63997963a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384910, 'name': PowerOffVM_Task, 'duration_secs': 0.173534} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.538253] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d09ece69-57be-400a-9a3f-41f63997963a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 914.538587] env[62585]: DEBUG nova.compute.manager [None req-d09ece69-57be-400a-9a3f-41f63997963a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 914.539450] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c33aad-af0c-44d0-a03f-cb8d14d68bc5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.583342] env[62585]: DEBUG nova.compute.utils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 914.585081] env[62585]: DEBUG nova.compute.manager [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 914.585255] env[62585]: DEBUG nova.network.neutron [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 914.640134] env[62585]: DEBUG nova.policy [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28834cc42f8a49cebca5647badabf8ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c49ab537d42244f495aaa3cbdaafc6b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 914.710583] env[62585]: DEBUG oslo_concurrency.lockutils [None req-06a41c15-958f-4bd6-9f79-2f7c052636fd tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "70ac6289-2f14-4fb0-a811-97d76cafc532" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.827s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.846315] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock 
"70ac6289-2f14-4fb0-a811-97d76cafc532" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.846794] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "70ac6289-2f14-4fb0-a811-97d76cafc532" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.848399] env[62585]: DEBUG oslo_vmware.api [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384908, 'name': PowerOnVM_Task, 'duration_secs': 1.419889} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.849149] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 914.849486] env[62585]: INFO nova.compute.manager [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Took 11.35 seconds to spawn the instance on the hypervisor. [ 914.849772] env[62585]: DEBUG nova.compute.manager [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 914.850755] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86aff2d4-fcb5-4e89-b9b5-e6ef9f068150 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.902505] env[62585]: DEBUG nova.network.neutron [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Successfully created port: 1f9fc1f2-9662-4b22-be29-c0eb753bfbe5 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 914.964937] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Releasing lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.976824] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.976824] env[62585]: DEBUG oslo_concurrency.lockutils [None 
req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.976824] env[62585]: DEBUG nova.network.neutron [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 914.992475] env[62585]: DEBUG nova.virt.hardware [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='ef450f939dfa1b7a8624ea16d94294e5',container_format='bare',created_at=2024-10-31T10:05:35Z,direct_url=,disk_format='vmdk',id=5d78a50d-b3a6-4aa7-8847-eb087b11a97e,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1087454638-shelved',owner='9645866ca8f0433cae30cf5867244ca8',properties=ImageMetaProps,protected=,size=31667200,status='active',tags=,updated_at=2024-10-31T10:05:50Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 914.993000] env[62585]: DEBUG nova.virt.hardware [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 914.993718] env[62585]: DEBUG nova.virt.hardware [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 914.994086] env[62585]: DEBUG nova.virt.hardware [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 914.994443] env[62585]: DEBUG nova.virt.hardware [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 914.994716] env[62585]: DEBUG nova.virt.hardware [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 914.995073] env[62585]: DEBUG nova.virt.hardware [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 
tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 914.995355] env[62585]: DEBUG nova.virt.hardware [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 914.995653] env[62585]: DEBUG nova.virt.hardware [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 914.997638] env[62585]: DEBUG nova.virt.hardware [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 914.997638] env[62585]: DEBUG nova.virt.hardware [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 914.997638] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43fe4a5e-0554-43d3-a310-d85fe31f7cbc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.007952] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8389cde-4cd8-455b-abbb-9e4a048ff782 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.027437] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:3b:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9b7e9e55-3210-4fae-9648-d87e76c3d931', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc3d19ab-ba98-4935-9e08-61c5df21be43', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 915.036509] env[62585]: DEBUG oslo.service.loopingcall [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 915.038175] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.038509] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 915.038802] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0412054-32ba-4d13-829c-a9345c859c3f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.070380] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 915.070380] env[62585]: value = "task-1384911" [ 915.070380] env[62585]: _type = "Task" [ 915.070380] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.071259] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d09ece69-57be-400a-9a3f-41f63997963a tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "4b080cc3-e1cc-4b64-9926-c37b891444f5" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.580s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.089231] env[62585]: DEBUG nova.compute.manager [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 915.092870] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384911, 'name': CreateVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.350373] env[62585]: INFO nova.compute.manager [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Detaching volume aa1719fc-04bb-46a3-84c3-9f3625281f3d [ 915.373579] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f154c9-6977-4c27-9cbb-8bf476e01ce9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.375964] env[62585]: INFO nova.compute.manager [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Took 22.24 seconds to build instance. 
[ 915.384818] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d3cc2c-b286-4c6b-83a9-09a0a390406b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.424441] env[62585]: INFO nova.virt.block_device [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Attempting to driver detach volume aa1719fc-04bb-46a3-84c3-9f3625281f3d from mountpoint /dev/sdb [ 915.425697] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Volume detach. Driver type: vmdk {{(pid=62585) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 915.425905] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294040', 'volume_id': 'aa1719fc-04bb-46a3-84c3-9f3625281f3d', 'name': 'volume-aa1719fc-04bb-46a3-84c3-9f3625281f3d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '70ac6289-2f14-4fb0-a811-97d76cafc532', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa1719fc-04bb-46a3-84c3-9f3625281f3d', 'serial': 'aa1719fc-04bb-46a3-84c3-9f3625281f3d'} {{(pid=62585) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 915.426756] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329ba43d-8fd0-4ebc-9eee-09bde3b0498d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.432569] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c35768-e62f-4b9b-b367-d59a6e4ea1f2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.460223] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff469a8-9803-40c0-865f-0cdaa0a59b43 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.465571] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc48bdc-b424-4d77-b185-5cee207ed462 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.479886] env[62585]: DEBUG nova.compute.provider_tree [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.487561] env[62585]: DEBUG nova.compute.manager [req-ca2c41e0-0e2b-4aba-b54f-c526514aa413 req-8f036bad-f6dc-4faf-ace5-d25dd04712e1 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Received event 
network-changed-bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 915.487780] env[62585]: DEBUG nova.compute.manager [req-ca2c41e0-0e2b-4aba-b54f-c526514aa413 req-8f036bad-f6dc-4faf-ace5-d25dd04712e1 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Refreshing instance network info cache due to event network-changed-bc3d19ab-ba98-4935-9e08-61c5df21be43. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 915.488136] env[62585]: DEBUG oslo_concurrency.lockutils [req-ca2c41e0-0e2b-4aba-b54f-c526514aa413 req-8f036bad-f6dc-4faf-ace5-d25dd04712e1 service nova] Acquiring lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.488314] env[62585]: DEBUG oslo_concurrency.lockutils [req-ca2c41e0-0e2b-4aba-b54f-c526514aa413 req-8f036bad-f6dc-4faf-ace5-d25dd04712e1 service nova] Acquired lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.488498] env[62585]: DEBUG nova.network.neutron [req-ca2c41e0-0e2b-4aba-b54f-c526514aa413 req-8f036bad-f6dc-4faf-ace5-d25dd04712e1 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Refreshing network info cache for port bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 915.490871] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8fd714-0b3a-46b6-9829-e3d1fdfc8831 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.517717] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae62e85-72a1-4bb6-859d-7019a1a07d2c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.537750] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] The volume has not been displaced from its original location: [datastore1] volume-aa1719fc-04bb-46a3-84c3-9f3625281f3d/volume-aa1719fc-04bb-46a3-84c3-9f3625281f3d.vmdk. No consolidation needed. 
{{(pid=62585) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 915.543132] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Reconfiguring VM instance instance-0000004c to detach disk 2001 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 915.543595] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2137fe82-fddd-4c44-9676-196d41ddc876 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.571566] env[62585]: DEBUG oslo_vmware.api [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for the task: (returnval){ [ 915.571566] env[62585]: value = "task-1384912" [ 915.571566] env[62585]: _type = "Task" [ 915.571566] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.584093] env[62585]: DEBUG oslo_vmware.api [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384912, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.587383] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384911, 'name': CreateVM_Task, 'duration_secs': 0.427161} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.587696] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 915.588495] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5d78a50d-b3a6-4aa7-8847-eb087b11a97e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.588754] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5d78a50d-b3a6-4aa7-8847-eb087b11a97e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.589350] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5d78a50d-b3a6-4aa7-8847-eb087b11a97e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 915.589662] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d53dc10-74ff-4443-a395-583f28813fdb {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.596112] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 915.596112] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52381a59-e1d8-f853-90ad-81f0a62268c1" [ 915.596112] env[62585]: _type = "Task" [ 915.596112] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.611020] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52381a59-e1d8-f853-90ad-81f0a62268c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.879649] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6d595d8a-c1c1-478c-9286-36fc706e6aef tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "01941b61-1960-4360-9dd0-513d5597bc70" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.756s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.993067] env[62585]: DEBUG nova.scheduler.client.report [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 916.012751] env[62585]: INFO nova.network.neutron [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Port 4b5af1c5-20c1-446c-aad5-023ac683f7e8 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 916.013200] env[62585]: DEBUG nova.network.neutron [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updating instance_info_cache with network_info: [{"id": "840822b3-e947-451f-90bf-03eafebebf95", "address": "fa:16:3e:f8:2a:80", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap840822b3-e9", "ovs_interfaceid": "840822b3-e947-451f-90bf-03eafebebf95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.082834] env[62585]: DEBUG oslo_vmware.api [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384912, 'name': ReconfigVM_Task, 'duration_secs': 0.329779} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.083475] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Reconfigured VM instance instance-0000004c to detach disk 2001 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 916.088847] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e49c3267-1d85-4ac8-aa2f-89487dc41198 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.107094] env[62585]: DEBUG nova.compute.manager [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 916.113095] env[62585]: DEBUG oslo_vmware.api [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for the task: (returnval){ [ 916.113095] env[62585]: value = "task-1384913" [ 916.113095] env[62585]: _type = "Task" [ 916.113095] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.120279] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5d78a50d-b3a6-4aa7-8847-eb087b11a97e" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.120732] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Processing image 5d78a50d-b3a6-4aa7-8847-eb087b11a97e {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 916.121064] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5d78a50d-b3a6-4aa7-8847-eb087b11a97e/5d78a50d-b3a6-4aa7-8847-eb087b11a97e.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.121440] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5d78a50d-b3a6-4aa7-8847-eb087b11a97e/5d78a50d-b3a6-4aa7-8847-eb087b11a97e.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.121697] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 916.122038] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5da15b33-4213-4ff2-92ba-6a5a128c02a2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.135615] env[62585]: DEBUG oslo_vmware.api [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384913, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.137953] env[62585]: DEBUG nova.virt.hardware [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 916.138623] env[62585]: DEBUG nova.virt.hardware [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 916.138820] env[62585]: DEBUG nova.virt.hardware [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 916.139166] env[62585]: DEBUG nova.virt.hardware [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 916.139453] env[62585]: DEBUG nova.virt.hardware [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 916.139662] env[62585]: DEBUG nova.virt.hardware [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 916.140140] env[62585]: DEBUG nova.virt.hardware [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 916.140400] env[62585]: DEBUG nova.virt.hardware [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 916.140594] env[62585]: DEBUG nova.virt.hardware [None 
req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 916.140839] env[62585]: DEBUG nova.virt.hardware [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 916.141166] env[62585]: DEBUG nova.virt.hardware [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 916.143346] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a7f01b-e916-4e1d-8661-83b4ea740adb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.146538] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 916.146669] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 916.152109] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94b9a0c4-d4aa-48fd-a2ea-e04d28a7819f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.161932] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0d8292-83d6-4720-8836-e7d0ff7a1382 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.167553] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 916.167553] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52760d6b-01da-e314-8cd4-7ca0932d1d75" [ 916.167553] env[62585]: _type = "Task" [ 916.167553] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.189454] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Preparing fetch location {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 916.189814] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Fetch image to [datastore2] OSTACK_IMG_6836f257-d47c-4691-b1fb-8790d812c22d/OSTACK_IMG_6836f257-d47c-4691-b1fb-8790d812c22d.vmdk {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 916.190125] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Downloading stream optimized image 5d78a50d-b3a6-4aa7-8847-eb087b11a97e to [datastore2] OSTACK_IMG_6836f257-d47c-4691-b1fb-8790d812c22d/OSTACK_IMG_6836f257-d47c-4691-b1fb-8790d812c22d.vmdk on the data store datastore2 as vApp {{(pid=62585) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 916.190336] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Downloading image file data 5d78a50d-b3a6-4aa7-8847-eb087b11a97e to the ESX as VM named 'OSTACK_IMG_6836f257-d47c-4691-b1fb-8790d812c22d' {{(pid=62585) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 916.273731] env[62585]: DEBUG oslo_vmware.rw_handles [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 916.273731] env[62585]: value = "resgroup-9" [ 916.273731] env[62585]: _type = "ResourcePool" [ 916.273731] env[62585]: }. 
{{(pid=62585) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 916.274164] env[62585]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-4d48cbc6-45de-45a7-bd06-73fe72ab85a7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.298920] env[62585]: DEBUG nova.objects.instance [None req-dba638b1-0f3a-4281-882c-a1c5409ad0b3 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lazy-loading 'flavor' on Instance uuid 4b080cc3-e1cc-4b64-9926-c37b891444f5 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.308514] env[62585]: DEBUG oslo_vmware.rw_handles [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lease: (returnval){ [ 916.308514] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52112316-28d0-9fab-5d56-71437fab5dc4" [ 916.308514] env[62585]: _type = "HttpNfcLease" [ 916.308514] env[62585]: } obtained for vApp import into resource pool (val){ [ 916.308514] env[62585]: value = "resgroup-9" [ 916.308514] env[62585]: _type = "ResourcePool" [ 916.308514] env[62585]: }. {{(pid=62585) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 916.309207] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the lease: (returnval){ [ 916.309207] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52112316-28d0-9fab-5d56-71437fab5dc4" [ 916.309207] env[62585]: _type = "HttpNfcLease" [ 916.309207] env[62585]: } to be ready. {{(pid=62585) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 916.318935] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 916.318935] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52112316-28d0-9fab-5d56-71437fab5dc4" [ 916.318935] env[62585]: _type = "HttpNfcLease" [ 916.318935] env[62585]: } is initializing. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 916.367394] env[62585]: DEBUG nova.network.neutron [req-ca2c41e0-0e2b-4aba-b54f-c526514aa413 req-8f036bad-f6dc-4faf-ace5-d25dd04712e1 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updated VIF entry in instance network info cache for port bc3d19ab-ba98-4935-9e08-61c5df21be43. 
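The HttpNfcLease obtained above for ResourcePool.ImportVApp is polled until it leaves the "initializing" state. A sketch of that loop with oslo.vmware primitives, assuming `session` is a VMwareAPISession and `lease` is the HttpNfcLease moref shown in the log; the helper name is made up, and oslo.vmware wraps the same loop as VMwareAPISession.wait_for_lease_ready(), which is what the api.py:462 references here point at:

import time

from oslo_vmware import vim_util


def wait_until_lease_ready(session, lease, poll_interval=1):
    while True:
        # The "Lease: {...} is initializing" entries correspond to this read.
        state = session.invoke_api(vim_util, 'get_object_property',
                                   session.vim, lease, 'state')
        if state == 'ready':
            return
        if state == 'error':
            error = session.invoke_api(vim_util, 'get_object_property',
                                       session.vim, lease, 'error')
            raise RuntimeError('HttpNfcLease failed: %s' % error)
        time.sleep(poll_interval)

Once the lease is ready, its info exposes the datastore-side VMDK URL that the later "Found VMDK URL ... from lease info" entry refers to.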
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 916.367842] env[62585]: DEBUG nova.network.neutron [req-ca2c41e0-0e2b-4aba-b54f-c526514aa413 req-8f036bad-f6dc-4faf-ace5-d25dd04712e1 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updating instance_info_cache with network_info: [{"id": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "address": "fa:16:3e:31:3b:9c", "network": {"id": "8a8daef6-7b2d-44f6-8f2a-5cdf4dfff449", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-240024676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9645866ca8f0433cae30cf5867244ca8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc3d19ab-ba", "ovs_interfaceid": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.430339] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "01941b61-1960-4360-9dd0-513d5597bc70" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.430943] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "01941b61-1960-4360-9dd0-513d5597bc70" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.430943] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "01941b61-1960-4360-9dd0-513d5597bc70-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.431164] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "01941b61-1960-4360-9dd0-513d5597bc70-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.431409] env[62585]: DEBUG oslo_concurrency.lockutils [None 
req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "01941b61-1960-4360-9dd0-513d5597bc70-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.434059] env[62585]: INFO nova.compute.manager [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Terminating instance [ 916.436070] env[62585]: DEBUG nova.compute.manager [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 916.436314] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 916.437230] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0530e7-c21e-4a74-8e78-e2a81fe7e8bb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.447273] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 916.447273] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f8369e0-7c04-46f6-9c2d-23e507031500 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.456194] env[62585]: DEBUG oslo_vmware.api [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 916.456194] env[62585]: value = "task-1384915" [ 916.456194] env[62585]: _type = "Task" [ 916.456194] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.467190] env[62585]: DEBUG oslo_vmware.api [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384915, 'name': PowerOffVM_Task} progress is 0%. 
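The "Acquiring lock ... by ..." / "Lock ... acquired ... waited Ns" / "released ... held Ns" triplets throughout these entries are emitted by oslo.concurrency's lockutils around a named critical section. A minimal sketch of the decorator form that produces such lines; the lock name below is the instance UUID used in the log, and the function body is only a placeholder:

from oslo_concurrency import lockutils


@lockutils.synchronized('01941b61-1960-4360-9dd0-513d5597bc70')
def critical_section():
    # Concurrent callers using the same lock name serialize here; lockutils
    # logs the "Acquiring", "acquired ... waited" and "released ... held"
    # DEBUG lines around entry and exit of this body.
    pass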
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.502732] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.420s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.503416] env[62585]: DEBUG nova.compute.manager [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 916.507075] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.324s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.508871] env[62585]: INFO nova.compute.claims [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 916.516323] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.603182] env[62585]: DEBUG nova.compute.manager [req-9ef9c26f-51b0-48f9-89a4-af449a5af410 req-33beb0ba-2d7f-457f-82b4-e62107c47044 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Received event network-changed-840822b3-e947-451f-90bf-03eafebebf95 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 916.604358] env[62585]: DEBUG nova.compute.manager [req-9ef9c26f-51b0-48f9-89a4-af449a5af410 req-33beb0ba-2d7f-457f-82b4-e62107c47044 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Refreshing instance network info cache due to event network-changed-840822b3-e947-451f-90bf-03eafebebf95. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 916.604959] env[62585]: DEBUG oslo_concurrency.lockutils [req-9ef9c26f-51b0-48f9-89a4-af449a5af410 req-33beb0ba-2d7f-457f-82b4-e62107c47044 service nova] Acquiring lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.605557] env[62585]: DEBUG oslo_concurrency.lockutils [req-9ef9c26f-51b0-48f9-89a4-af449a5af410 req-33beb0ba-2d7f-457f-82b4-e62107c47044 service nova] Acquired lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.605924] env[62585]: DEBUG nova.network.neutron [req-9ef9c26f-51b0-48f9-89a4-af449a5af410 req-33beb0ba-2d7f-457f-82b4-e62107c47044 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Refreshing network info cache for port 840822b3-e947-451f-90bf-03eafebebf95 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 916.629389] env[62585]: DEBUG oslo_vmware.api [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384913, 'name': ReconfigVM_Task, 'duration_secs': 0.17583} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.629774] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294040', 'volume_id': 'aa1719fc-04bb-46a3-84c3-9f3625281f3d', 'name': 'volume-aa1719fc-04bb-46a3-84c3-9f3625281f3d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '70ac6289-2f14-4fb0-a811-97d76cafc532', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa1719fc-04bb-46a3-84c3-9f3625281f3d', 'serial': 'aa1719fc-04bb-46a3-84c3-9f3625281f3d'} {{(pid=62585) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 916.643040] env[62585]: DEBUG nova.network.neutron [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Successfully updated port: 1f9fc1f2-9662-4b22-be29-c0eb753bfbe5 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 916.804068] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dba638b1-0f3a-4281-882c-a1c5409ad0b3 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "refresh_cache-4b080cc3-e1cc-4b64-9926-c37b891444f5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.804260] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dba638b1-0f3a-4281-882c-a1c5409ad0b3 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquired lock "refresh_cache-4b080cc3-e1cc-4b64-9926-c37b891444f5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.804535] env[62585]: DEBUG 
nova.network.neutron [None req-dba638b1-0f3a-4281-882c-a1c5409ad0b3 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 916.804842] env[62585]: DEBUG nova.objects.instance [None req-dba638b1-0f3a-4281-882c-a1c5409ad0b3 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lazy-loading 'info_cache' on Instance uuid 4b080cc3-e1cc-4b64-9926-c37b891444f5 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.819930] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 916.819930] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52112316-28d0-9fab-5d56-71437fab5dc4" [ 916.819930] env[62585]: _type = "HttpNfcLease" [ 916.819930] env[62585]: } is initializing. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 916.871397] env[62585]: DEBUG oslo_concurrency.lockutils [req-ca2c41e0-0e2b-4aba-b54f-c526514aa413 req-8f036bad-f6dc-4faf-ace5-d25dd04712e1 service nova] Releasing lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.969150] env[62585]: DEBUG oslo_vmware.api [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384915, 'name': PowerOffVM_Task, 'duration_secs': 0.230936} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.969455] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 916.969643] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 916.969936] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4b375e4-5fc8-4ecb-b7a6-81596603bcae {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.009335] env[62585]: DEBUG nova.compute.utils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 917.010915] env[62585]: DEBUG nova.compute.manager [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 917.011107] env[62585]: DEBUG nova.network.neutron [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 917.021341] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9802bc28-f8f5-447a-a112-c03208cac760 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "interface-a634a80e-d90a-4ce3-8233-75657a7754be-4b5af1c5-20c1-446c-aad5-023ac683f7e8" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.028s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.051319] env[62585]: DEBUG nova.policy [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01befe1db3684d60943c74da2c2c9fdc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f00751679b29472e9ab92c9e48a99925', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 917.109025] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 917.109248] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 917.109478] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleting the datastore file [datastore1] 01941b61-1960-4360-9dd0-513d5597bc70 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 917.112136] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57a96efe-f989-4f28-b855-49d4d7059c69 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.121843] env[62585]: DEBUG oslo_vmware.api [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 917.121843] env[62585]: value = "task-1384917" [ 917.121843] env[62585]: _type = "Task" [ 917.121843] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.135220] env[62585]: DEBUG oslo_vmware.api [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384917, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.151022] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "refresh_cache-3abb84ea-b613-4956-a64f-c4ad230343c2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.151022] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "refresh_cache-3abb84ea-b613-4956-a64f-c4ad230343c2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.151022] env[62585]: DEBUG nova.network.neutron [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 917.192535] env[62585]: DEBUG nova.objects.instance [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lazy-loading 'flavor' on Instance uuid 70ac6289-2f14-4fb0-a811-97d76cafc532 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.312107] env[62585]: DEBUG nova.objects.base [None req-dba638b1-0f3a-4281-882c-a1c5409ad0b3 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Object Instance<4b080cc3-e1cc-4b64-9926-c37b891444f5> lazy-loaded attributes: flavor,info_cache {{(pid=62585) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 917.313902] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "interface-ddb1103d-a846-4229-b441-de45424b4ec9-4b5af1c5-20c1-446c-aad5-023ac683f7e8" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.314295] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "interface-ddb1103d-a846-4229-b441-de45424b4ec9-4b5af1c5-20c1-446c-aad5-023ac683f7e8" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.314828] env[62585]: DEBUG nova.objects.instance [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lazy-loading 'flavor' on Instance 
uuid ddb1103d-a846-4229-b441-de45424b4ec9 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.325370] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 917.325370] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52112316-28d0-9fab-5d56-71437fab5dc4" [ 917.325370] env[62585]: _type = "HttpNfcLease" [ 917.325370] env[62585]: } is ready. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 917.325699] env[62585]: DEBUG oslo_vmware.rw_handles [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 917.325699] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52112316-28d0-9fab-5d56-71437fab5dc4" [ 917.325699] env[62585]: _type = "HttpNfcLease" [ 917.325699] env[62585]: }. {{(pid=62585) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 917.326504] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a083479-b45b-4c94-aa8d-9c55ad137b36 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.337115] env[62585]: DEBUG oslo_vmware.rw_handles [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5296dc53-9ca7-fbee-74dc-0a939837dbad/disk-0.vmdk from lease info. {{(pid=62585) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 917.337461] env[62585]: DEBUG oslo_vmware.rw_handles [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Creating HTTP connection to write to file with size = 31667200 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5296dc53-9ca7-fbee-74dc-0a939837dbad/disk-0.vmdk. {{(pid=62585) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 917.408644] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f3dbe4ce-b895-4fe1-8bea-5aad08b6637e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.514253] env[62585]: DEBUG nova.compute.manager [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 917.632584] env[62585]: DEBUG oslo_vmware.api [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384917, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159208} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.635397] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 917.635623] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 917.635820] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 917.639019] env[62585]: INFO nova.compute.manager [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Took 1.20 seconds to destroy the instance on the hypervisor. [ 917.639019] env[62585]: DEBUG oslo.service.loopingcall [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 917.639019] env[62585]: DEBUG nova.compute.manager [-] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 917.639019] env[62585]: DEBUG nova.network.neutron [-] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 917.645476] env[62585]: DEBUG nova.network.neutron [req-9ef9c26f-51b0-48f9-89a4-af449a5af410 req-33beb0ba-2d7f-457f-82b4-e62107c47044 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updated VIF entry in instance network info cache for port 840822b3-e947-451f-90bf-03eafebebf95. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 917.646252] env[62585]: DEBUG nova.network.neutron [req-9ef9c26f-51b0-48f9-89a4-af449a5af410 req-33beb0ba-2d7f-457f-82b4-e62107c47044 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updating instance_info_cache with network_info: [{"id": "840822b3-e947-451f-90bf-03eafebebf95", "address": "fa:16:3e:f8:2a:80", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap840822b3-e9", "ovs_interfaceid": "840822b3-e947-451f-90bf-03eafebebf95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.712883] env[62585]: DEBUG nova.network.neutron [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Instance cache missing network info. 
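The "Updating instance_info_cache with network_info: [...]" entries above carry one dict per VIF with the port, subnet and binding details inline. A small helper that walks that recorded structure (purely illustrative; `vif` stands for one element of the list as logged):

def summarize_vif(vif):
    subnet = vif['network']['subnets'][0]
    fixed = [ip['address'] for ip in subnet['ips']]
    floating = [fip['address']
                for ip in subnet['ips']
                for fip in ip.get('floating_ips', [])]
    return {
        'port_id': vif['id'],
        'mac': vif['address'],
        'mtu': vif['network']['meta']['mtu'],
        'segmentation_id': vif['details']['segmentation_id'],
        'fixed_ips': fixed,
        'floating_ips': floating,
        'ovs_interface': vif.get('ovs_interfaceid'),
    }

For the entry logged for port 840822b3-e947-451f-90bf-03eafebebf95 this yields fixed IP 192.168.128.9, MTU 8950 and segmentation_id 162, matching the cached values above.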
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 917.737734] env[62585]: DEBUG nova.network.neutron [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Successfully created port: 5fe6ba89-b6bd-4982-bac0-2c93b5697204 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.799443] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e78140d-3b21-4f7f-90b8-04446b9387d2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.812290] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be59a84-d8fd-42d4-8234-cdcab8ec046f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.867568] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b46003-e07b-4b76-851e-3fa0ec661bc4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.882513] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f42181-5800-4df7-84d3-98ddd6b59e22 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.902465] env[62585]: DEBUG nova.compute.provider_tree [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.977330] env[62585]: DEBUG nova.compute.manager [req-7585fef9-d2ff-4276-b050-b501fdce8ee8 req-69c9e3f4-aa57-439f-a95a-ccf585c307a7 service nova] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Received event network-vif-plugged-1f9fc1f2-9662-4b22-be29-c0eb753bfbe5 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 917.977558] env[62585]: DEBUG oslo_concurrency.lockutils [req-7585fef9-d2ff-4276-b050-b501fdce8ee8 req-69c9e3f4-aa57-439f-a95a-ccf585c307a7 service nova] Acquiring lock "3abb84ea-b613-4956-a64f-c4ad230343c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.977831] env[62585]: DEBUG oslo_concurrency.lockutils [req-7585fef9-d2ff-4276-b050-b501fdce8ee8 req-69c9e3f4-aa57-439f-a95a-ccf585c307a7 service nova] Lock "3abb84ea-b613-4956-a64f-c4ad230343c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.978130] env[62585]: DEBUG oslo_concurrency.lockutils [req-7585fef9-d2ff-4276-b050-b501fdce8ee8 req-69c9e3f4-aa57-439f-a95a-ccf585c307a7 service nova] Lock "3abb84ea-b613-4956-a64f-c4ad230343c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.978276] env[62585]: DEBUG 
nova.compute.manager [req-7585fef9-d2ff-4276-b050-b501fdce8ee8 req-69c9e3f4-aa57-439f-a95a-ccf585c307a7 service nova] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] No waiting events found dispatching network-vif-plugged-1f9fc1f2-9662-4b22-be29-c0eb753bfbe5 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 917.978459] env[62585]: WARNING nova.compute.manager [req-7585fef9-d2ff-4276-b050-b501fdce8ee8 req-69c9e3f4-aa57-439f-a95a-ccf585c307a7 service nova] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Received unexpected event network-vif-plugged-1f9fc1f2-9662-4b22-be29-c0eb753bfbe5 for instance with vm_state building and task_state spawning. [ 917.978625] env[62585]: DEBUG nova.compute.manager [req-7585fef9-d2ff-4276-b050-b501fdce8ee8 req-69c9e3f4-aa57-439f-a95a-ccf585c307a7 service nova] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Received event network-changed-1f9fc1f2-9662-4b22-be29-c0eb753bfbe5 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 917.978782] env[62585]: DEBUG nova.compute.manager [req-7585fef9-d2ff-4276-b050-b501fdce8ee8 req-69c9e3f4-aa57-439f-a95a-ccf585c307a7 service nova] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Refreshing instance network info cache due to event network-changed-1f9fc1f2-9662-4b22-be29-c0eb753bfbe5. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 917.978967] env[62585]: DEBUG oslo_concurrency.lockutils [req-7585fef9-d2ff-4276-b050-b501fdce8ee8 req-69c9e3f4-aa57-439f-a95a-ccf585c307a7 service nova] Acquiring lock "refresh_cache-3abb84ea-b613-4956-a64f-c4ad230343c2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.037433] env[62585]: DEBUG nova.objects.instance [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lazy-loading 'pci_requests' on Instance uuid ddb1103d-a846-4229-b441-de45424b4ec9 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.149391] env[62585]: DEBUG oslo_concurrency.lockutils [req-9ef9c26f-51b0-48f9-89a4-af449a5af410 req-33beb0ba-2d7f-457f-82b4-e62107c47044 service nova] Releasing lock "refresh_cache-a634a80e-d90a-4ce3-8233-75657a7754be" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.214338] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8606d1cb-fd23-436f-b442-eea45fcb7ac2 tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "70ac6289-2f14-4fb0-a811-97d76cafc532" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.367s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.230185] env[62585]: DEBUG nova.network.neutron [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Updating instance_info_cache with network_info: [{"id": "1f9fc1f2-9662-4b22-be29-c0eb753bfbe5", "address": "fa:16:3e:a1:dd:59", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": 
{}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f9fc1f2-96", "ovs_interfaceid": "1f9fc1f2-9662-4b22-be29-c0eb753bfbe5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.414367] env[62585]: DEBUG nova.scheduler.client.report [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 918.430846] env[62585]: DEBUG nova.network.neutron [None req-dba638b1-0f3a-4281-882c-a1c5409ad0b3 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Updating instance_info_cache with network_info: [{"id": "144b0acd-f385-4e1c-b8cc-8396bf7f7648", "address": "fa:16:3e:57:e9:e4", "network": {"id": "7ae3ebe7-9a7f-4139-a4c5-a66a0b7f0d27", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1276079323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbafceca6afd477e8afa38df5790b585", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap144b0acd-f3", "ovs_interfaceid": "144b0acd-f385-4e1c-b8cc-8396bf7f7648", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.495908] env[62585]: DEBUG nova.network.neutron [-] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.528906] env[62585]: DEBUG 
nova.compute.manager [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 918.540640] env[62585]: DEBUG nova.objects.base [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62585) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 918.540928] env[62585]: DEBUG nova.network.neutron [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 918.559476] env[62585]: DEBUG nova.virt.hardware [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='2698806a410e0496229015ef0d780696',container_format='bare',created_at=2024-10-31T10:05:50Z,direct_url=,disk_format='vmdk',id=310691a0-fca0-4934-a7c2-2e7b96be6e6c,min_disk=1,min_ram=0,name='tempest-test-snap-1067498958',owner='f00751679b29472e9ab92c9e48a99925',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-10-31T10:06:09Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 918.559819] env[62585]: DEBUG nova.virt.hardware [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 918.560093] env[62585]: DEBUG nova.virt.hardware [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 918.560402] env[62585]: DEBUG nova.virt.hardware [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 918.560678] env[62585]: DEBUG nova.virt.hardware [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 918.560900] env[62585]: DEBUG nova.virt.hardware [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 918.561218] env[62585]: DEBUG nova.virt.hardware [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 918.561468] env[62585]: DEBUG nova.virt.hardware [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 918.561715] env[62585]: DEBUG nova.virt.hardware [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 918.562019] env[62585]: DEBUG nova.virt.hardware [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 918.562265] env[62585]: DEBUG nova.virt.hardware [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 918.563465] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066c64e9-fe47-459d-8609-958fb64058e4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.573984] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac4f787-7bb1-4086-a854-f3bc82c21fc2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.664491] env[62585]: DEBUG oslo_vmware.rw_handles [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Completed reading data from the image iterator. {{(pid=62585) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 918.664745] env[62585]: DEBUG oslo_vmware.rw_handles [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5296dc53-9ca7-fbee-74dc-0a939837dbad/disk-0.vmdk. 
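The hardware.py sequence above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerates socket/core/thread factorizations of the flavor's vCPU count under the 65536 limits that were logged. A simplified stand-in for that enumeration, not Nova's actual hardware.py:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Any (sockets, cores, threads) triple whose product equals the vCPU
    # count is a candidate topology.
    found = []
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        for cores in range(1, min(max_cores, vcpus) + 1):
            for threads in range(1, min(max_threads, vcpus) + 1):
                if sockets * cores * threads == vcpus:
                    found.append((sockets, cores, threads))
    return found

possible_topologies(1) returns [(1, 1, 1)], i.e. the single VirtCPUTopology(cores=1,sockets=1,threads=1) reported for the m1.nano flavor in these entries.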
{{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 918.666168] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3698a05-418b-4c1b-8519-1b5ea5e87897 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.673953] env[62585]: DEBUG oslo_vmware.rw_handles [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5296dc53-9ca7-fbee-74dc-0a939837dbad/disk-0.vmdk is in state: ready. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 918.674284] env[62585]: DEBUG oslo_vmware.rw_handles [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5296dc53-9ca7-fbee-74dc-0a939837dbad/disk-0.vmdk. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 918.675059] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-c1eb67c2-df3b-475a-8dc1-1d591abb2410 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.736093] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "refresh_cache-3abb84ea-b613-4956-a64f-c4ad230343c2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.736485] env[62585]: DEBUG nova.compute.manager [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Instance network_info: |[{"id": "1f9fc1f2-9662-4b22-be29-c0eb753bfbe5", "address": "fa:16:3e:a1:dd:59", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f9fc1f2-96", "ovs_interfaceid": "1f9fc1f2-9662-4b22-be29-c0eb753bfbe5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 918.736826] env[62585]: DEBUG oslo_concurrency.lockutils [req-7585fef9-d2ff-4276-b050-b501fdce8ee8 
req-69c9e3f4-aa57-439f-a95a-ccf585c307a7 service nova] Acquired lock "refresh_cache-3abb84ea-b613-4956-a64f-c4ad230343c2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.737032] env[62585]: DEBUG nova.network.neutron [req-7585fef9-d2ff-4276-b050-b501fdce8ee8 req-69c9e3f4-aa57-439f-a95a-ccf585c307a7 service nova] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Refreshing network info cache for port 1f9fc1f2-9662-4b22-be29-c0eb753bfbe5 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 918.738313] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:dd:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f9fc1f2-9662-4b22-be29-c0eb753bfbe5', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 918.745912] env[62585]: DEBUG oslo.service.loopingcall [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 918.749073] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 918.749623] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c480beea-831f-4821-a89a-98375335fb46 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.774794] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 918.774794] env[62585]: value = "task-1384918" [ 918.774794] env[62585]: _type = "Task" [ 918.774794] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.786497] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384918, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.887109] env[62585]: DEBUG oslo_vmware.rw_handles [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5296dc53-9ca7-fbee-74dc-0a939837dbad/disk-0.vmdk. 
{{(pid=62585) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 918.887325] env[62585]: INFO nova.virt.vmwareapi.images [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Downloaded image file data 5d78a50d-b3a6-4aa7-8847-eb087b11a97e [ 918.888271] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2315ffb8-06b0-4a87-8df9-2b375a5c9ffc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.908474] env[62585]: DEBUG nova.policy [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f26abf4eaa71482b8fd3c6425a9c683d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48929b5f0c2c41ddade223ab57002fc4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 918.912177] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e687f6f5-974e-4d3b-b705-c5c4c2900cd6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.918604] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.919135] env[62585]: DEBUG nova.compute.manager [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 918.921838] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.884s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.921974] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.937545] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dba638b1-0f3a-4281-882c-a1c5409ad0b3 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Releasing lock "refresh_cache-4b080cc3-e1cc-4b64-9926-c37b891444f5" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.945195] env[62585]: INFO nova.virt.vmwareapi.images [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] The imported VM was unregistered [ 918.946731] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Caching image {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 918.946731] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Creating directory with path [datastore2] devstack-image-cache_base/5d78a50d-b3a6-4aa7-8847-eb087b11a97e {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 918.946731] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c00ac9a-1491-4d38-82cd-4e40a56b75d3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.964032] env[62585]: INFO nova.scheduler.client.report [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Deleted allocations for instance d96a04d7-b07f-439d-aafa-09dc70a4d1a7 [ 918.974829] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Created directory with path [datastore2] devstack-image-cache_base/5d78a50d-b3a6-4aa7-8847-eb087b11a97e {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 918.974829] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Moving virtual disk from 
[datastore2] OSTACK_IMG_6836f257-d47c-4691-b1fb-8790d812c22d/OSTACK_IMG_6836f257-d47c-4691-b1fb-8790d812c22d.vmdk to [datastore2] devstack-image-cache_base/5d78a50d-b3a6-4aa7-8847-eb087b11a97e/5d78a50d-b3a6-4aa7-8847-eb087b11a97e.vmdk. {{(pid=62585) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 918.974829] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-29e0aa73-35c9-40d8-afca-85d066b760ad {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.984167] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 918.984167] env[62585]: value = "task-1384920" [ 918.984167] env[62585]: _type = "Task" [ 918.984167] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.996482] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384920, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.001049] env[62585]: INFO nova.compute.manager [-] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Took 1.36 seconds to deallocate network for instance. [ 919.098792] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "70ac6289-2f14-4fb0-a811-97d76cafc532" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.099094] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "70ac6289-2f14-4fb0-a811-97d76cafc532" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.099328] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "70ac6289-2f14-4fb0-a811-97d76cafc532-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.099532] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "70ac6289-2f14-4fb0-a811-97d76cafc532-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.099740] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da 
tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "70ac6289-2f14-4fb0-a811-97d76cafc532-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.102602] env[62585]: INFO nova.compute.manager [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Terminating instance [ 919.105011] env[62585]: DEBUG nova.compute.manager [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 919.105224] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 919.106106] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90cb7495-71f0-4c68-99c4-3791344de4f0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.117697] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 919.118014] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6255697-1d10-48f5-8fee-68e2a02e2353 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.126153] env[62585]: DEBUG oslo_vmware.api [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for the task: (returnval){ [ 919.126153] env[62585]: value = "task-1384921" [ 919.126153] env[62585]: _type = "Task" [ 919.126153] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.138655] env[62585]: DEBUG oslo_vmware.api [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384921, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.232799] env[62585]: DEBUG nova.network.neutron [req-7585fef9-d2ff-4276-b050-b501fdce8ee8 req-69c9e3f4-aa57-439f-a95a-ccf585c307a7 service nova] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Updated VIF entry in instance network info cache for port 1f9fc1f2-9662-4b22-be29-c0eb753bfbe5. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 919.233203] env[62585]: DEBUG nova.network.neutron [req-7585fef9-d2ff-4276-b050-b501fdce8ee8 req-69c9e3f4-aa57-439f-a95a-ccf585c307a7 service nova] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Updating instance_info_cache with network_info: [{"id": "1f9fc1f2-9662-4b22-be29-c0eb753bfbe5", "address": "fa:16:3e:a1:dd:59", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f9fc1f2-96", "ovs_interfaceid": "1f9fc1f2-9662-4b22-be29-c0eb753bfbe5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.287702] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384918, 'name': CreateVM_Task, 'duration_secs': 0.421025} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.287976] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 919.291080] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.291295] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.291741] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 919.292057] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-736aaad3-e357-4a73-afc2-02be535765d7 {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.302632] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 919.302632] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]520faccf-d105-8ab3-20ca-8bd9c8319abe" [ 919.302632] env[62585]: _type = "Task" [ 919.302632] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.320951] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]520faccf-d105-8ab3-20ca-8bd9c8319abe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.404223] env[62585]: DEBUG nova.compute.manager [req-175c5269-7b57-4e56-b4f3-1fd84bc7cd1c req-1c8d8c58-cf68-42e1-82fd-e3fd5f3f5792 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Received event network-changed-b29379d9-a516-40cd-b7f0-35505b917bcb {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.404389] env[62585]: DEBUG nova.compute.manager [req-175c5269-7b57-4e56-b4f3-1fd84bc7cd1c req-1c8d8c58-cf68-42e1-82fd-e3fd5f3f5792 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Refreshing instance network info cache due to event network-changed-b29379d9-a516-40cd-b7f0-35505b917bcb. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 919.404617] env[62585]: DEBUG oslo_concurrency.lockutils [req-175c5269-7b57-4e56-b4f3-1fd84bc7cd1c req-1c8d8c58-cf68-42e1-82fd-e3fd5f3f5792 service nova] Acquiring lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.404762] env[62585]: DEBUG oslo_concurrency.lockutils [req-175c5269-7b57-4e56-b4f3-1fd84bc7cd1c req-1c8d8c58-cf68-42e1-82fd-e3fd5f3f5792 service nova] Acquired lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.404920] env[62585]: DEBUG nova.network.neutron [req-175c5269-7b57-4e56-b4f3-1fd84bc7cd1c req-1c8d8c58-cf68-42e1-82fd-e3fd5f3f5792 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Refreshing network info cache for port b29379d9-a516-40cd-b7f0-35505b917bcb {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 919.425570] env[62585]: DEBUG nova.compute.utils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 919.427266] env[62585]: DEBUG nova.compute.manager [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 919.427456] env[62585]: DEBUG nova.network.neutron [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 919.443875] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-dba638b1-0f3a-4281-882c-a1c5409ad0b3 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 919.445037] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7db25ec3-10b0-4e58-994f-ebe4091fc340 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.457375] env[62585]: DEBUG oslo_vmware.api [None req-dba638b1-0f3a-4281-882c-a1c5409ad0b3 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 919.457375] env[62585]: value = "task-1384922" [ 919.457375] env[62585]: _type = "Task" [ 919.457375] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.468503] env[62585]: DEBUG oslo_vmware.api [None req-dba638b1-0f3a-4281-882c-a1c5409ad0b3 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384922, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.480622] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6a531ae9-2a7a-4106-8b77-c84645e50cb1 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "d96a04d7-b07f-439d-aafa-09dc70a4d1a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.881s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.500955] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384920, 'name': MoveVirtualDisk_Task} progress is 15%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.506047] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.506490] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.506866] env[62585]: DEBUG nova.objects.instance [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lazy-loading 'resources' on Instance uuid 01941b61-1960-4360-9dd0-513d5597bc70 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.539462] env[62585]: DEBUG nova.policy [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9a2336e0b124f03ad700405bcad8f32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19b8936eaf754cbcbd1b099846a3146d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 919.639934] env[62585]: DEBUG oslo_vmware.api [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384921, 'name': PowerOffVM_Task, 'duration_secs': 0.264158} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.640345] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 919.640511] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 919.640777] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6e67229-110e-4087-b694-4161a8ca51af {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.735709] env[62585]: DEBUG oslo_concurrency.lockutils [req-7585fef9-d2ff-4276-b050-b501fdce8ee8 req-69c9e3f4-aa57-439f-a95a-ccf585c307a7 service nova] Releasing lock "refresh_cache-3abb84ea-b613-4956-a64f-c4ad230343c2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.818256] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]520faccf-d105-8ab3-20ca-8bd9c8319abe, 'name': SearchDatastore_Task, 'duration_secs': 0.077652} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.818700] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.819041] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 919.819312] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.819481] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.819695] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 919.820074] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74930aee-82a6-411b-b455-f79d1d6a6435 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.841709] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 919.841959] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 919.843079] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba4aca94-ecd3-4226-a9c5-7d162a96a6d1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.853208] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 919.853208] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]524beb58-f1c1-cde1-eb08-097427c61440" [ 919.853208] env[62585]: _type = "Task" [ 919.853208] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.865057] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]524beb58-f1c1-cde1-eb08-097427c61440, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.931520] env[62585]: DEBUG nova.compute.manager [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 919.971272] env[62585]: DEBUG oslo_vmware.api [None req-dba638b1-0f3a-4281-882c-a1c5409ad0b3 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384922, 'name': PowerOnVM_Task, 'duration_secs': 0.499001} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.971647] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-dba638b1-0f3a-4281-882c-a1c5409ad0b3 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 919.971786] env[62585]: DEBUG nova.compute.manager [None req-dba638b1-0f3a-4281-882c-a1c5409ad0b3 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 919.972624] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d944ee62-a6aa-4ed8-8754-c7176b9e09e2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.004947] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384920, 'name': MoveVirtualDisk_Task} progress is 35%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.193785] env[62585]: DEBUG nova.network.neutron [req-175c5269-7b57-4e56-b4f3-1fd84bc7cd1c req-1c8d8c58-cf68-42e1-82fd-e3fd5f3f5792 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Updated VIF entry in instance network info cache for port b29379d9-a516-40cd-b7f0-35505b917bcb. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 920.193785] env[62585]: DEBUG nova.network.neutron [req-175c5269-7b57-4e56-b4f3-1fd84bc7cd1c req-1c8d8c58-cf68-42e1-82fd-e3fd5f3f5792 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Updating instance_info_cache with network_info: [{"id": "b29379d9-a516-40cd-b7f0-35505b917bcb", "address": "fa:16:3e:1c:50:ee", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb29379d9-a5", "ovs_interfaceid": "b29379d9-a516-40cd-b7f0-35505b917bcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.221545] env[62585]: DEBUG nova.network.neutron [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Successfully updated port: 5fe6ba89-b6bd-4982-bac0-2c93b5697204 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 920.232508] env[62585]: DEBUG nova.network.neutron [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Successfully created port: 52550d14-1f84-4991-83fc-b68ce7d6200f {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 920.241168] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63578aca-a829-47d5-ba23-7edb929756ef {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.254533] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be964c10-340d-4bc2-bed5-b1107083038d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.297440] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-76c7dbaa-248a-4400-9f8e-6e762dc12bc2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.311347] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-929df329-b092-4b68-b308-7b59b2e6e2c4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.332765] env[62585]: DEBUG nova.compute.provider_tree [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.370024] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]524beb58-f1c1-cde1-eb08-097427c61440, 'name': SearchDatastore_Task, 'duration_secs': 0.097147} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.370024] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22dd77a6-36cb-4c0e-a640-21f36fc371b7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.377666] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 920.377666] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52ace4b7-56e9-deb5-2933-0a1de23a829b" [ 920.377666] env[62585]: _type = "Task" [ 920.377666] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.390359] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52ace4b7-56e9-deb5-2933-0a1de23a829b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.506298] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384920, 'name': MoveVirtualDisk_Task} progress is 54%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.666341] env[62585]: DEBUG nova.compute.manager [req-f2099327-7845-4512-b849-ba73e5a58c8e req-eb8e1f14-f109-4658-922d-31bc1d4f4dd5 service nova] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Received event network-vif-plugged-5fe6ba89-b6bd-4982-bac0-2c93b5697204 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 920.666341] env[62585]: DEBUG oslo_concurrency.lockutils [req-f2099327-7845-4512-b849-ba73e5a58c8e req-eb8e1f14-f109-4658-922d-31bc1d4f4dd5 service nova] Acquiring lock "b6186aef-8f4c-409a-83aa-1548545ea7c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.666341] env[62585]: DEBUG oslo_concurrency.lockutils [req-f2099327-7845-4512-b849-ba73e5a58c8e req-eb8e1f14-f109-4658-922d-31bc1d4f4dd5 service nova] Lock "b6186aef-8f4c-409a-83aa-1548545ea7c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.666341] env[62585]: DEBUG oslo_concurrency.lockutils [req-f2099327-7845-4512-b849-ba73e5a58c8e req-eb8e1f14-f109-4658-922d-31bc1d4f4dd5 service nova] Lock "b6186aef-8f4c-409a-83aa-1548545ea7c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.666341] env[62585]: DEBUG nova.compute.manager [req-f2099327-7845-4512-b849-ba73e5a58c8e req-eb8e1f14-f109-4658-922d-31bc1d4f4dd5 service nova] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] No waiting events found dispatching network-vif-plugged-5fe6ba89-b6bd-4982-bac0-2c93b5697204 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 920.667023] env[62585]: WARNING nova.compute.manager [req-f2099327-7845-4512-b849-ba73e5a58c8e req-eb8e1f14-f109-4658-922d-31bc1d4f4dd5 service nova] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Received unexpected event network-vif-plugged-5fe6ba89-b6bd-4982-bac0-2c93b5697204 for instance with vm_state building and task_state spawning. 
[ 920.699382] env[62585]: DEBUG oslo_concurrency.lockutils [req-175c5269-7b57-4e56-b4f3-1fd84bc7cd1c req-1c8d8c58-cf68-42e1-82fd-e3fd5f3f5792 service nova] Releasing lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.700578] env[62585]: DEBUG nova.compute.manager [req-175c5269-7b57-4e56-b4f3-1fd84bc7cd1c req-1c8d8c58-cf68-42e1-82fd-e3fd5f3f5792 service nova] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] Received event network-vif-deleted-795b88c3-09ab-44aa-bb6f-8bd339ffc0de {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 920.730613] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "refresh_cache-b6186aef-8f4c-409a-83aa-1548545ea7c4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 920.730613] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired lock "refresh_cache-b6186aef-8f4c-409a-83aa-1548545ea7c4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.730613] env[62585]: DEBUG nova.network.neutron [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.841051] env[62585]: DEBUG nova.scheduler.client.report [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 920.893906] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52ace4b7-56e9-deb5-2933-0a1de23a829b, 'name': SearchDatastore_Task, 'duration_secs': 0.093987} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.894234] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.894503] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 3abb84ea-b613-4956-a64f-c4ad230343c2/3abb84ea-b613-4956-a64f-c4ad230343c2.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 920.894798] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4b2f890-ace4-4f12-b660-5e9f8e187399 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.908212] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 920.908212] env[62585]: value = "task-1384924" [ 920.908212] env[62585]: _type = "Task" [ 920.908212] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.920163] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384924, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.942380] env[62585]: DEBUG nova.compute.manager [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 920.968841] env[62585]: DEBUG nova.network.neutron [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Successfully updated port: 4b5af1c5-20c1-446c-aad5-023ac683f7e8 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 920.977008] env[62585]: DEBUG nova.virt.hardware [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 920.977200] env[62585]: DEBUG nova.virt.hardware [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 920.977470] env[62585]: DEBUG nova.virt.hardware [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 920.977623] env[62585]: DEBUG nova.virt.hardware [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 920.977733] env[62585]: DEBUG nova.virt.hardware [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 920.977886] env[62585]: DEBUG nova.virt.hardware [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 920.978118] env[62585]: DEBUG nova.virt.hardware [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 920.978289] env[62585]: DEBUG nova.virt.hardware [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 920.978454] env[62585]: DEBUG nova.virt.hardware [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 920.978617] env[62585]: DEBUG nova.virt.hardware [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 920.978800] env[62585]: DEBUG nova.virt.hardware [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 920.979757] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4592808c-da2c-4dcf-90b3-4ff85edd55b8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.997273] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b727d1fe-a5b7-4241-916e-90a2e7d2d544 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.011430] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384920, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.269409] env[62585]: DEBUG nova.network.neutron [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 921.348242] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.841s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.376334] env[62585]: INFO nova.scheduler.client.report [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleted allocations for instance 01941b61-1960-4360-9dd0-513d5597bc70 [ 921.420486] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384924, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.471293] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.471589] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.471747] env[62585]: DEBUG nova.network.neutron [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 921.497740] env[62585]: DEBUG nova.network.neutron [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Updating instance_info_cache with network_info: [{"id": "5fe6ba89-b6bd-4982-bac0-2c93b5697204", "address": "fa:16:3e:81:dc:13", "network": {"id": "f73c6c58-29b8-4fb6-a001-94a77e4e6a53", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1579050178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f00751679b29472e9ab92c9e48a99925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fe6ba89-b6", "ovs_interfaceid": 
"5fe6ba89-b6bd-4982-bac0-2c93b5697204", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.508395] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384920, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.888384] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fe5374d1-446e-4a9d-8f65-40c59f5c7a7b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "01941b61-1960-4360-9dd0-513d5597bc70" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.458s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.924764] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384924, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.000316] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Releasing lock "refresh_cache-b6186aef-8f4c-409a-83aa-1548545ea7c4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.000873] env[62585]: DEBUG nova.compute.manager [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Instance network_info: |[{"id": "5fe6ba89-b6bd-4982-bac0-2c93b5697204", "address": "fa:16:3e:81:dc:13", "network": {"id": "f73c6c58-29b8-4fb6-a001-94a77e4e6a53", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1579050178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f00751679b29472e9ab92c9e48a99925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fe6ba89-b6", "ovs_interfaceid": "5fe6ba89-b6bd-4982-bac0-2c93b5697204", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 922.001399] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 
tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:dc:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5fe6ba89-b6bd-4982-bac0-2c93b5697204', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 922.009711] env[62585]: DEBUG oslo.service.loopingcall [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 922.013868] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 922.014231] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384920, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.758574} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.014454] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4a575fa2-f18d-441a-8f5e-f7f3355d385c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.030510] env[62585]: INFO nova.virt.vmwareapi.ds_util [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_6836f257-d47c-4691-b1fb-8790d812c22d/OSTACK_IMG_6836f257-d47c-4691-b1fb-8790d812c22d.vmdk to [datastore2] devstack-image-cache_base/5d78a50d-b3a6-4aa7-8847-eb087b11a97e/5d78a50d-b3a6-4aa7-8847-eb087b11a97e.vmdk. 
[ 922.030786] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Cleaning up location [datastore2] OSTACK_IMG_6836f257-d47c-4691-b1fb-8790d812c22d {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 922.030981] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_6836f257-d47c-4691-b1fb-8790d812c22d {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 922.031603] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66f19bbd-1968-40af-83e3-86900fbec17c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.043868] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 922.043868] env[62585]: value = "task-1384925" [ 922.043868] env[62585]: _type = "Task" [ 922.043868] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.044900] env[62585]: WARNING nova.network.neutron [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] 19cc8f17-f362-4186-901c-3dc61c1ef3e5 already exists in list: networks containing: ['19cc8f17-f362-4186-901c-3dc61c1ef3e5']. ignoring it [ 922.049567] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 922.049567] env[62585]: value = "task-1384926" [ 922.049567] env[62585]: _type = "Task" [ 922.049567] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.063214] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384925, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.070802] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384926, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.361717] env[62585]: DEBUG nova.network.neutron [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Successfully updated port: 52550d14-1f84-4991-83fc-b68ce7d6200f {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 922.420918] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384924, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.228199} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.421281] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 3abb84ea-b613-4956-a64f-c4ad230343c2/3abb84ea-b613-4956-a64f-c4ad230343c2.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 922.421421] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 922.421770] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-62c489ad-9c45-44b2-a092-6490fadd1fe5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.430938] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 922.430938] env[62585]: value = "task-1384927" [ 922.430938] env[62585]: _type = "Task" [ 922.430938] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.446340] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384927, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.562596] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384925, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107677} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.563274] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 922.563457] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5d78a50d-b3a6-4aa7-8847-eb087b11a97e/5d78a50d-b3a6-4aa7-8847-eb087b11a97e.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.563704] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/5d78a50d-b3a6-4aa7-8847-eb087b11a97e/5d78a50d-b3a6-4aa7-8847-eb087b11a97e.vmdk to [datastore2] 6057e13b-71df-458d-b6ed-c139a8c57836/6057e13b-71df-458d-b6ed-c139a8c57836.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 922.563967] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0723dd40-b94a-4883-831f-d987ec2a424a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.568908] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384926, 'name': CreateVM_Task} progress is 25%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.575375] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 922.575375] env[62585]: value = "task-1384928" [ 922.575375] env[62585]: _type = "Task" [ 922.575375] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.583939] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384928, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.593543] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 922.593785] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 922.593971] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Deleting the datastore file [datastore1] 70ac6289-2f14-4fb0-a811-97d76cafc532 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 922.594369] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b32b21a5-4b03-4c81-87de-b9462059497e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.601417] env[62585]: DEBUG oslo_vmware.api [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for the task: (returnval){ [ 922.601417] env[62585]: value = "task-1384929" [ 922.601417] env[62585]: _type = "Task" [ 922.601417] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.610401] env[62585]: DEBUG oslo_vmware.api [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384929, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.611550] env[62585]: DEBUG nova.network.neutron [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Updating instance_info_cache with network_info: [{"id": "b29379d9-a516-40cd-b7f0-35505b917bcb", "address": "fa:16:3e:1c:50:ee", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb29379d9-a5", "ovs_interfaceid": "b29379d9-a516-40cd-b7f0-35505b917bcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4b5af1c5-20c1-446c-aad5-023ac683f7e8", "address": "fa:16:3e:21:aa:89", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b5af1c5-20", "ovs_interfaceid": "4b5af1c5-20c1-446c-aad5-023ac683f7e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.864468] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "refresh_cache-54f542b5-3aba-49d6-a487-62714416b86f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.864780] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock 
"refresh_cache-54f542b5-3aba-49d6-a487-62714416b86f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.864993] env[62585]: DEBUG nova.network.neutron [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 922.943835] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384927, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077142} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.944299] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 922.945420] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffde0f2e-f65e-425d-886e-dc941c58549a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.980132] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 3abb84ea-b613-4956-a64f-c4ad230343c2/3abb84ea-b613-4956-a64f-c4ad230343c2.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 922.980607] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21af91e9-114a-423e-91ad-180e0ee4c220 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.019866] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 923.019866] env[62585]: value = "task-1384930" [ 923.019866] env[62585]: _type = "Task" [ 923.019866] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.032863] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384930, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.067568] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384926, 'name': CreateVM_Task, 'duration_secs': 0.833034} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.067840] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 923.068709] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/310691a0-fca0-4934-a7c2-2e7b96be6e6c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.068952] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/310691a0-fca0-4934-a7c2-2e7b96be6e6c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.069446] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/310691a0-fca0-4934-a7c2-2e7b96be6e6c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 923.069774] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb71794b-5893-4ba4-a33c-d780e33de95f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.082426] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 923.082426] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52eb5fa7-c05f-c045-ad63-5b8fc0959778" [ 923.082426] env[62585]: _type = "Task" [ 923.082426] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.089846] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384928, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.102989] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52eb5fa7-c05f-c045-ad63-5b8fc0959778, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.114348] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.114773] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.114939] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.115242] env[62585]: DEBUG oslo_vmware.api [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Task: {'id': task-1384929, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.357003} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.115984] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6489da-84fb-476e-b2da-3650eb6d5e4e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.118882] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 923.119095] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 923.119283] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 923.119463] env[62585]: INFO nova.compute.manager [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Took 4.01 seconds to destroy the instance on the hypervisor. 
[ 923.119707] env[62585]: DEBUG oslo.service.loopingcall [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 923.119927] env[62585]: DEBUG nova.compute.manager [-] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 923.120109] env[62585]: DEBUG nova.network.neutron [-] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 923.137243] env[62585]: DEBUG nova.virt.hardware [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 923.137710] env[62585]: DEBUG nova.virt.hardware [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 923.137710] env[62585]: DEBUG nova.virt.hardware [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 923.138516] env[62585]: DEBUG nova.virt.hardware [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 923.138516] env[62585]: DEBUG nova.virt.hardware [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 923.138516] env[62585]: DEBUG nova.virt.hardware [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 923.138516] env[62585]: DEBUG nova.virt.hardware [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 
tempest-AttachInterfacesTestJSON-1857705027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 923.138730] env[62585]: DEBUG nova.virt.hardware [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 923.138730] env[62585]: DEBUG nova.virt.hardware [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 923.138886] env[62585]: DEBUG nova.virt.hardware [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 923.139094] env[62585]: DEBUG nova.virt.hardware [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 923.145455] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Reconfiguring VM to attach interface {{(pid=62585) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 923.146401] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79bcc32b-c5b8-4e81-abc9-4eee0d8820a7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.170274] env[62585]: DEBUG oslo_vmware.api [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 923.170274] env[62585]: value = "task-1384931" [ 923.170274] env[62585]: _type = "Task" [ 923.170274] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.182526] env[62585]: DEBUG oslo_vmware.api [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384931, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.532821] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384930, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.590638] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384928, 'name': CopyVirtualDisk_Task} progress is 29%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.600169] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/310691a0-fca0-4934-a7c2-2e7b96be6e6c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.600482] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Processing image 310691a0-fca0-4934-a7c2-2e7b96be6e6c {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 923.600730] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/310691a0-fca0-4934-a7c2-2e7b96be6e6c/310691a0-fca0-4934-a7c2-2e7b96be6e6c.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.600876] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/310691a0-fca0-4934-a7c2-2e7b96be6e6c/310691a0-fca0-4934-a7c2-2e7b96be6e6c.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.601247] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 923.601666] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a8ef860-8900-4c20-83b2-d4509874889d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.622527] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 923.622841] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 923.623670] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d0f126f-058f-4c37-8e2e-ac0237d7fcc7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.632642] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 923.632642] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5232813c-9aae-7067-cb9a-3928d4a610b2" [ 923.632642] env[62585]: _type = "Task" [ 923.632642] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.644718] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5232813c-9aae-7067-cb9a-3928d4a610b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.654552] env[62585]: DEBUG nova.network.neutron [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 923.689413] env[62585]: DEBUG oslo_vmware.api [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384931, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.980841] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "e4edc1dd-52ea-428e-832a-b49d3bc4fe14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.980841] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "e4edc1dd-52ea-428e-832a-b49d3bc4fe14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.990540] env[62585]: DEBUG nova.network.neutron [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Updating instance_info_cache with network_info: [{"id": "52550d14-1f84-4991-83fc-b68ce7d6200f", "address": "fa:16:3e:29:ab:dd", "network": {"id": "8c3bc3f6-1bf0-436b-b7d4-cf0757610bb8", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972774874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19b8936eaf754cbcbd1b099846a3146d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52550d14-1f", "ovs_interfaceid": "52550d14-1f84-4991-83fc-b68ce7d6200f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.036309] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384930, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.090193] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384928, 'name': CopyVirtualDisk_Task} progress is 49%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.147117] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Preparing fetch location {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 924.147428] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Fetch image to [datastore2] OSTACK_IMG_38384489-05c2-4e8a-9347-b435afd85d4d/OSTACK_IMG_38384489-05c2-4e8a-9347-b435afd85d4d.vmdk {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 924.147620] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Downloading stream optimized image 310691a0-fca0-4934-a7c2-2e7b96be6e6c to [datastore2] OSTACK_IMG_38384489-05c2-4e8a-9347-b435afd85d4d/OSTACK_IMG_38384489-05c2-4e8a-9347-b435afd85d4d.vmdk on the data store datastore2 as vApp {{(pid=62585) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 924.147903] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Downloading image file data 310691a0-fca0-4934-a7c2-2e7b96be6e6c to the ESX as VM named 'OSTACK_IMG_38384489-05c2-4e8a-9347-b435afd85d4d' {{(pid=62585) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 924.156259] env[62585]: DEBUG nova.compute.manager [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Received event network-changed-5fe6ba89-b6bd-4982-bac0-2c93b5697204 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 924.156504] env[62585]: DEBUG nova.compute.manager [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Refreshing instance network info cache due to event network-changed-5fe6ba89-b6bd-4982-bac0-2c93b5697204. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 924.156695] env[62585]: DEBUG oslo_concurrency.lockutils [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] Acquiring lock "refresh_cache-b6186aef-8f4c-409a-83aa-1548545ea7c4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.156850] env[62585]: DEBUG oslo_concurrency.lockutils [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] Acquired lock "refresh_cache-b6186aef-8f4c-409a-83aa-1548545ea7c4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.157017] env[62585]: DEBUG nova.network.neutron [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Refreshing network info cache for port 5fe6ba89-b6bd-4982-bac0-2c93b5697204 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 924.187430] env[62585]: DEBUG oslo_vmware.api [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384931, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.249178] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 924.249178] env[62585]: value = "resgroup-9" [ 924.249178] env[62585]: _type = "ResourcePool" [ 924.249178] env[62585]: }. {{(pid=62585) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 924.249637] env[62585]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a539b793-7590-4bed-b8cb-a67959d46005 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.276652] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lease: (returnval){ [ 924.276652] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526c89c0-3e3b-e77a-1158-b86a36b9bdd7" [ 924.276652] env[62585]: _type = "HttpNfcLease" [ 924.276652] env[62585]: } obtained for vApp import into resource pool (val){ [ 924.276652] env[62585]: value = "resgroup-9" [ 924.276652] env[62585]: _type = "ResourcePool" [ 924.276652] env[62585]: }. {{(pid=62585) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 924.276652] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the lease: (returnval){ [ 924.276652] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526c89c0-3e3b-e77a-1158-b86a36b9bdd7" [ 924.276652] env[62585]: _type = "HttpNfcLease" [ 924.276652] env[62585]: } to be ready. 
{{(pid=62585) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 924.287491] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 924.287491] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526c89c0-3e3b-e77a-1158-b86a36b9bdd7" [ 924.287491] env[62585]: _type = "HttpNfcLease" [ 924.287491] env[62585]: } is initializing. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 924.484388] env[62585]: DEBUG nova.compute.manager [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 924.493498] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "refresh_cache-54f542b5-3aba-49d6-a487-62714416b86f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.494281] env[62585]: DEBUG nova.compute.manager [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Instance network_info: |[{"id": "52550d14-1f84-4991-83fc-b68ce7d6200f", "address": "fa:16:3e:29:ab:dd", "network": {"id": "8c3bc3f6-1bf0-436b-b7d4-cf0757610bb8", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972774874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19b8936eaf754cbcbd1b099846a3146d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52550d14-1f", "ovs_interfaceid": "52550d14-1f84-4991-83fc-b68ce7d6200f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 924.494439] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:ab:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52550d14-1f84-4991-83fc-b68ce7d6200f', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 924.503167] env[62585]: DEBUG oslo.service.loopingcall [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca 
tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 924.503167] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 924.503167] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b251db1d-add0-476b-994c-fe2d614d1aba {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.530593] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 924.530593] env[62585]: value = "task-1384933" [ 924.530593] env[62585]: _type = "Task" [ 924.530593] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.537307] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384930, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.547073] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384933, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.593741] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384928, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.684026] env[62585]: DEBUG oslo_vmware.api [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384931, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.709075] env[62585]: DEBUG nova.network.neutron [-] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.792960] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 924.792960] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526c89c0-3e3b-e77a-1158-b86a36b9bdd7" [ 924.792960] env[62585]: _type = "HttpNfcLease" [ 924.792960] env[62585]: } is initializing. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 924.987810] env[62585]: DEBUG nova.network.neutron [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Updated VIF entry in instance network info cache for port 5fe6ba89-b6bd-4982-bac0-2c93b5697204. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 924.988261] env[62585]: DEBUG nova.network.neutron [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Updating instance_info_cache with network_info: [{"id": "5fe6ba89-b6bd-4982-bac0-2c93b5697204", "address": "fa:16:3e:81:dc:13", "network": {"id": "f73c6c58-29b8-4fb6-a001-94a77e4e6a53", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1579050178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f00751679b29472e9ab92c9e48a99925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fe6ba89-b6", "ovs_interfaceid": "5fe6ba89-b6bd-4982-bac0-2c93b5697204", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.010295] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.010588] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.012198] env[62585]: INFO nova.compute.claims [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 925.038947] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384930, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.048762] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384933, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.093562] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384928, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.186579] env[62585]: DEBUG oslo_vmware.api [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384931, 'name': ReconfigVM_Task, 'duration_secs': 1.649748} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.187190] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.187470] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Reconfigured VM to attach interface {{(pid=62585) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 925.211419] env[62585]: INFO nova.compute.manager [-] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Took 2.09 seconds to deallocate network for instance. [ 925.289483] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 925.289483] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526c89c0-3e3b-e77a-1158-b86a36b9bdd7" [ 925.289483] env[62585]: _type = "HttpNfcLease" [ 925.289483] env[62585]: } is initializing. 
{{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 925.493286] env[62585]: DEBUG oslo_concurrency.lockutils [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] Releasing lock "refresh_cache-b6186aef-8f4c-409a-83aa-1548545ea7c4" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.493575] env[62585]: DEBUG nova.compute.manager [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Received event network-vif-plugged-4b5af1c5-20c1-446c-aad5-023ac683f7e8 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 925.493786] env[62585]: DEBUG oslo_concurrency.lockutils [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] Acquiring lock "ddb1103d-a846-4229-b441-de45424b4ec9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.494010] env[62585]: DEBUG oslo_concurrency.lockutils [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] Lock "ddb1103d-a846-4229-b441-de45424b4ec9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.494271] env[62585]: DEBUG oslo_concurrency.lockutils [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] Lock "ddb1103d-a846-4229-b441-de45424b4ec9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.494444] env[62585]: DEBUG nova.compute.manager [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] No waiting events found dispatching network-vif-plugged-4b5af1c5-20c1-446c-aad5-023ac683f7e8 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 925.494641] env[62585]: WARNING nova.compute.manager [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Received unexpected event network-vif-plugged-4b5af1c5-20c1-446c-aad5-023ac683f7e8 for instance with vm_state active and task_state None. [ 925.494811] env[62585]: DEBUG nova.compute.manager [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Received event network-changed-4b5af1c5-20c1-446c-aad5-023ac683f7e8 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 925.494969] env[62585]: DEBUG nova.compute.manager [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Refreshing instance network info cache due to event network-changed-4b5af1c5-20c1-446c-aad5-023ac683f7e8. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 925.495186] env[62585]: DEBUG oslo_concurrency.lockutils [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] Acquiring lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.495325] env[62585]: DEBUG oslo_concurrency.lockutils [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] Acquired lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.495485] env[62585]: DEBUG nova.network.neutron [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Refreshing network info cache for port 4b5af1c5-20c1-446c-aad5-023ac683f7e8 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 925.536710] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384930, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.546261] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384933, 'name': CreateVM_Task, 'duration_secs': 0.926721} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.546486] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 925.547166] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.547352] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.547691] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 925.547989] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5f60d28-e016-44da-8650-be4048b2a6e7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.554665] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca 
tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 925.554665] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5215ee0a-cbee-4aa5-c369-162228f3999f" [ 925.554665] env[62585]: _type = "Task" [ 925.554665] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.564432] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5215ee0a-cbee-4aa5-c369-162228f3999f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.593053] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384928, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.885576} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.593354] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/5d78a50d-b3a6-4aa7-8847-eb087b11a97e/5d78a50d-b3a6-4aa7-8847-eb087b11a97e.vmdk to [datastore2] 6057e13b-71df-458d-b6ed-c139a8c57836/6057e13b-71df-458d-b6ed-c139a8c57836.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 925.594262] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5858568f-c643-4d4b-851d-c005c88271b0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.617566] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 6057e13b-71df-458d-b6ed-c139a8c57836/6057e13b-71df-458d-b6ed-c139a8c57836.vmdk or device None with type streamOptimized {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 925.618080] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97bcf70e-3d8c-4ecf-8a1a-668be9d4d856 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.639667] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 925.639667] env[62585]: value = "task-1384934" [ 925.639667] env[62585]: _type = "Task" [ 925.639667] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.648941] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384934, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.692288] env[62585]: DEBUG oslo_concurrency.lockutils [None req-cd482e8a-560d-43df-93fc-00ecc56ef933 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "interface-ddb1103d-a846-4229-b441-de45424b4ec9-4b5af1c5-20c1-446c-aad5-023ac683f7e8" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.378s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.718420] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.789266] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 925.789266] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526c89c0-3e3b-e77a-1158-b86a36b9bdd7" [ 925.789266] env[62585]: _type = "HttpNfcLease" [ 925.789266] env[62585]: } is initializing. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 925.813882] env[62585]: DEBUG oslo_vmware.rw_handles [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b92888-f4a8-01f9-9ba9-020698fbbefc/disk-0.vmdk. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 925.814998] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38228a1-d708-4f25-881d-7a9702e7f575 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.822932] env[62585]: DEBUG oslo_vmware.rw_handles [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b92888-f4a8-01f9-9ba9-020698fbbefc/disk-0.vmdk is in state: ready. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 925.823135] env[62585]: ERROR oslo_vmware.rw_handles [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b92888-f4a8-01f9-9ba9-020698fbbefc/disk-0.vmdk due to incomplete transfer. 
[ 925.823389] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d82b0ffe-29a5-4bf3-a1c0-6f235fbdbad0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.831735] env[62585]: DEBUG oslo_vmware.rw_handles [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b92888-f4a8-01f9-9ba9-020698fbbefc/disk-0.vmdk. {{(pid=62585) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 925.831978] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Uploaded image 0a472987-1439-41cb-99c4-bd857ec93b02 to the Glance image server {{(pid=62585) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 925.834232] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Destroying the VM {{(pid=62585) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 925.834564] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1107f6e1-2578-4e00-911b-10e62bc04e95 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.842920] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 925.842920] env[62585]: value = "task-1384935" [ 925.842920] env[62585]: _type = "Task" [ 925.842920] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.853086] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384935, 'name': Destroy_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.035191] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384930, 'name': ReconfigVM_Task, 'duration_secs': 2.56799} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.035544] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 3abb84ea-b613-4956-a64f-c4ad230343c2/3abb84ea-b613-4956-a64f-c4ad230343c2.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.036283] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-491e96b1-9c00-44fa-91f9-6dc4b108e45e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.043836] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 926.043836] env[62585]: value = "task-1384936" [ 926.043836] env[62585]: _type = "Task" [ 926.043836] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.053844] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384936, 'name': Rename_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.065550] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5215ee0a-cbee-4aa5-c369-162228f3999f, 'name': SearchDatastore_Task, 'duration_secs': 0.026822} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.068369] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.068587] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 926.068853] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.069054] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.069308] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 926.069747] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1621640-13aa-43a3-b16e-40b30d1d030e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.083150] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 926.083362] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 926.084165] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e603e2a-13c9-4c64-a71a-fdc020edc58e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.090889] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 926.090889] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52d4f3d4-7c39-d934-a33d-86f31869e824" [ 926.090889] env[62585]: _type = "Task" [ 926.090889] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.102611] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52d4f3d4-7c39-d934-a33d-86f31869e824, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.151560] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384934, 'name': ReconfigVM_Task, 'duration_secs': 0.370972} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.152181] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 6057e13b-71df-458d-b6ed-c139a8c57836/6057e13b-71df-458d-b6ed-c139a8c57836.vmdk or device None with type streamOptimized {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.152838] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a32ecc79-c62f-479b-a710-aeca5c4d646f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.166094] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 926.166094] env[62585]: value = "task-1384937" [ 926.166094] env[62585]: _type = "Task" [ 926.166094] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.186294] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384937, 'name': Rename_Task} progress is 6%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.215527] env[62585]: DEBUG nova.compute.manager [req-b5af11ad-6bac-41de-bd90-8884fa72fc90 req-56cac201-c349-4cf3-8318-771e684649a7 service nova] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Received event network-vif-deleted-acdb870f-a3ba-445e-96f3-64fdd59c10a8 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 926.221263] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32993ddd-8e17-4deb-814e-821a600e0604 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.228326] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f0800f-50be-41a2-af19-ae9e3ddbe149 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.265843] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5952ab35-ab47-419b-bc42-f73887189039 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.277265] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1408c1-5acf-41fa-82a1-c183a2f0e39a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.290283] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 926.290283] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526c89c0-3e3b-e77a-1158-b86a36b9bdd7" [ 926.290283] env[62585]: _type = "HttpNfcLease" [ 926.290283] env[62585]: } is ready. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 926.299376] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 926.299376] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526c89c0-3e3b-e77a-1158-b86a36b9bdd7" [ 926.299376] env[62585]: _type = "HttpNfcLease" [ 926.299376] env[62585]: }. {{(pid=62585) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 926.300047] env[62585]: DEBUG nova.compute.provider_tree [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.302265] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1dd8426-be94-43e0-b102-2c2fdcbafc8b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.310876] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521ba3da-d8fe-3787-adfd-19e63d872c09/disk-0.vmdk from lease info. 
{{(pid=62585) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 926.311141] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521ba3da-d8fe-3787-adfd-19e63d872c09/disk-0.vmdk. {{(pid=62585) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 926.372364] env[62585]: DEBUG nova.network.neutron [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Updated VIF entry in instance network info cache for port 4b5af1c5-20c1-446c-aad5-023ac683f7e8. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 926.372364] env[62585]: DEBUG nova.network.neutron [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Updating instance_info_cache with network_info: [{"id": "b29379d9-a516-40cd-b7f0-35505b917bcb", "address": "fa:16:3e:1c:50:ee", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb29379d9-a5", "ovs_interfaceid": "b29379d9-a516-40cd-b7f0-35505b917bcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4b5af1c5-20c1-446c-aad5-023ac683f7e8", "address": "fa:16:3e:21:aa:89", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b5af1c5-20", "ovs_interfaceid": "4b5af1c5-20c1-446c-aad5-023ac683f7e8", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.386226] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0aad7076-6e13-4109-a68c-adb8d50145f7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.388531] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384935, 'name': Destroy_Task, 'duration_secs': 0.334206} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.390047] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Destroyed the VM [ 926.390166] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Deleting Snapshot of the VM instance {{(pid=62585) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 926.391946] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fec95fab-fd56-4042-8813-1024481ff24c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.399887] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 926.399887] env[62585]: value = "task-1384938" [ 926.399887] env[62585]: _type = "Task" [ 926.399887] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.408883] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384938, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.554188] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384936, 'name': Rename_Task, 'duration_secs': 0.313974} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.555467] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 926.555467] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f2ce055-aa3d-4631-8aae-a000678a43b9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.564215] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 926.564215] env[62585]: value = "task-1384939" [ 926.564215] env[62585]: _type = "Task" [ 926.564215] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.576722] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384939, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.602693] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52d4f3d4-7c39-d934-a33d-86f31869e824, 'name': SearchDatastore_Task, 'duration_secs': 0.024625} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.604407] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83a7e879-70c5-4c20-bd5f-dfe4cb5bc117 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.609607] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 926.609607] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c9f263-87ca-0445-5092-a4da3b58a4da" [ 926.609607] env[62585]: _type = "Task" [ 926.609607] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.621632] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c9f263-87ca-0445-5092-a4da3b58a4da, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.676806] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384937, 'name': Rename_Task, 'duration_secs': 0.157617} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.677118] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 926.677382] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8cbdbb94-5cc4-4258-8ea2-07c29bf76f00 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.685782] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 926.685782] env[62585]: value = "task-1384940" [ 926.685782] env[62585]: _type = "Task" [ 926.685782] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.696685] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384940, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.806421] env[62585]: DEBUG nova.scheduler.client.report [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 926.877928] env[62585]: DEBUG oslo_concurrency.lockutils [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] Releasing lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.878282] env[62585]: DEBUG nova.compute.manager [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Received event network-vif-plugged-52550d14-1f84-4991-83fc-b68ce7d6200f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 926.878550] env[62585]: DEBUG oslo_concurrency.lockutils [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] Acquiring lock "54f542b5-3aba-49d6-a487-62714416b86f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.878746] env[62585]: DEBUG oslo_concurrency.lockutils [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] Lock "54f542b5-3aba-49d6-a487-62714416b86f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.878923] env[62585]: DEBUG oslo_concurrency.lockutils [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] Lock "54f542b5-3aba-49d6-a487-62714416b86f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.879132] env[62585]: DEBUG nova.compute.manager [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] No waiting events found dispatching network-vif-plugged-52550d14-1f84-4991-83fc-b68ce7d6200f {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 926.879392] env[62585]: WARNING nova.compute.manager [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Received unexpected event network-vif-plugged-52550d14-1f84-4991-83fc-b68ce7d6200f for instance with vm_state building and task_state spawning. 
[ 926.879611] env[62585]: DEBUG nova.compute.manager [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Received event network-changed-52550d14-1f84-4991-83fc-b68ce7d6200f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 926.879808] env[62585]: DEBUG nova.compute.manager [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Refreshing instance network info cache due to event network-changed-52550d14-1f84-4991-83fc-b68ce7d6200f. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 926.880101] env[62585]: DEBUG oslo_concurrency.lockutils [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] Acquiring lock "refresh_cache-54f542b5-3aba-49d6-a487-62714416b86f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.880311] env[62585]: DEBUG oslo_concurrency.lockutils [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] Acquired lock "refresh_cache-54f542b5-3aba-49d6-a487-62714416b86f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.880559] env[62585]: DEBUG nova.network.neutron [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Refreshing network info cache for port 52550d14-1f84-4991-83fc-b68ce7d6200f {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 926.914788] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384938, 'name': RemoveSnapshot_Task, 'duration_secs': 0.330137} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.916113] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Deleted Snapshot of the VM instance {{(pid=62585) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 926.916473] env[62585]: DEBUG nova.compute.manager [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 926.917324] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4abeec90-c1f3-4a09-8cb4-210379cae59e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.971809] env[62585]: DEBUG oslo_concurrency.lockutils [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "fcbbc06c-71fa-4891-8bfc-0de746b9e622" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.972161] env[62585]: DEBUG oslo_concurrency.lockutils [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "fcbbc06c-71fa-4891-8bfc-0de746b9e622" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.972393] env[62585]: DEBUG oslo_concurrency.lockutils [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "fcbbc06c-71fa-4891-8bfc-0de746b9e622-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.972611] env[62585]: DEBUG oslo_concurrency.lockutils [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "fcbbc06c-71fa-4891-8bfc-0de746b9e622-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.972830] env[62585]: DEBUG oslo_concurrency.lockutils [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "fcbbc06c-71fa-4891-8bfc-0de746b9e622-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.976708] env[62585]: INFO nova.compute.manager [None req-401f5a01-1bba-44a3-8086-d1884c51dabd 
tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Terminating instance [ 926.978693] env[62585]: DEBUG nova.compute.manager [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 926.978909] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 926.979781] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828f40ae-0fe9-4878-bf4b-417fa064e4cb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.991219] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 926.993017] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6547d661-65c6-440d-8bd2-0e10eb957dba {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.001489] env[62585]: DEBUG oslo_vmware.api [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 927.001489] env[62585]: value = "task-1384941" [ 927.001489] env[62585]: _type = "Task" [ 927.001489] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.011946] env[62585]: DEBUG oslo_vmware.api [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384941, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.026215] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "interface-ddb1103d-a846-4229-b441-de45424b4ec9-4b5af1c5-20c1-446c-aad5-023ac683f7e8" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.026623] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "interface-ddb1103d-a846-4229-b441-de45424b4ec9-4b5af1c5-20c1-446c-aad5-023ac683f7e8" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.078041] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384939, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.124578] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c9f263-87ca-0445-5092-a4da3b58a4da, 'name': SearchDatastore_Task, 'duration_secs': 0.024626} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.124927] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.125352] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 54f542b5-3aba-49d6-a487-62714416b86f/54f542b5-3aba-49d6-a487-62714416b86f.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 927.125772] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-accabd74-f996-491d-b908-49554f766112 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.135399] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 927.135399] env[62585]: value = "task-1384942" [ 927.135399] env[62585]: _type = "Task" [ 927.135399] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.148700] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384942, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.198884] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384940, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.311443] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.301s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.312394] env[62585]: DEBUG nova.compute.manager [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 927.315711] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.597s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.315803] env[62585]: DEBUG nova.objects.instance [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lazy-loading 'resources' on Instance uuid 70ac6289-2f14-4fb0-a811-97d76cafc532 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 927.437731] env[62585]: INFO nova.compute.manager [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Shelve offloading [ 927.440678] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 927.441405] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb893e2c-3ae6-47b7-a50d-438f17c17d37 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.453727] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 927.453727] 
env[62585]: value = "task-1384943" [ 927.453727] env[62585]: _type = "Task" [ 927.453727] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.468961] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] VM already powered off {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 927.469351] env[62585]: DEBUG nova.compute.manager [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 927.471948] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d149fcd5-07e0-4694-8af1-2184d49d8ffd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.482929] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.483167] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquired lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.483357] env[62585]: DEBUG nova.network.neutron [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 927.515438] env[62585]: DEBUG oslo_vmware.api [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384941, 'name': PowerOffVM_Task, 'duration_secs': 0.254617} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.515827] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 927.516249] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 927.516969] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfa66808-88fd-4ab4-afd5-d5aa0599645d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.529579] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.529852] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.533717] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eabd537-fcd8-4c18-b648-a34cb322b83e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.561043] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7ee659-e543-4eaa-a7c3-1f64e14e46db {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.595600] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Reconfiguring VM to detach interface {{(pid=62585) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 927.599881] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9c1e2de-73b9-4175-82c1-bb22f4a2b43d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.623724] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Completed reading data from the image iterator. 
{{(pid=62585) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 927.624125] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521ba3da-d8fe-3787-adfd-19e63d872c09/disk-0.vmdk. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 927.624536] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384939, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.624837] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 927.625048] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 927.625245] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Deleting the datastore file [datastore1] fcbbc06c-71fa-4891-8bfc-0de746b9e622 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 927.626242] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3e13f1-71e7-435a-bcc0-fff3d105b163 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.629790] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-917749ef-e7b2-4512-af63-2ff78b04c72c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.637752] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521ba3da-d8fe-3787-adfd-19e63d872c09/disk-0.vmdk is in state: ready. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 927.638178] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521ba3da-d8fe-3787-adfd-19e63d872c09/disk-0.vmdk. 
{{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 927.643952] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-50b116df-7a1d-433d-8bb8-0b4bb26adb1b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.645710] env[62585]: DEBUG oslo_vmware.api [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 927.645710] env[62585]: value = "task-1384945" [ 927.645710] env[62585]: _type = "Task" [ 927.645710] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.646958] env[62585]: DEBUG oslo_vmware.api [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 927.646958] env[62585]: value = "task-1384946" [ 927.646958] env[62585]: _type = "Task" [ 927.646958] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.657992] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384942, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.665344] env[62585]: DEBUG oslo_vmware.api [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384946, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.669535] env[62585]: DEBUG oslo_vmware.api [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.699051] env[62585]: DEBUG oslo_vmware.api [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1384940, 'name': PowerOnVM_Task, 'duration_secs': 0.632035} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.702027] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 927.778480] env[62585]: DEBUG nova.network.neutron [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Updated VIF entry in instance network info cache for port 52550d14-1f84-4991-83fc-b68ce7d6200f. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 927.778910] env[62585]: DEBUG nova.network.neutron [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Updating instance_info_cache with network_info: [{"id": "52550d14-1f84-4991-83fc-b68ce7d6200f", "address": "fa:16:3e:29:ab:dd", "network": {"id": "8c3bc3f6-1bf0-436b-b7d4-cf0757610bb8", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972774874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19b8936eaf754cbcbd1b099846a3146d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52550d14-1f", "ovs_interfaceid": "52550d14-1f84-4991-83fc-b68ce7d6200f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.808285] env[62585]: DEBUG nova.compute.manager [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 927.809723] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76570ee-2ca5-4c04-b3e0-8e70381ad013 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.820238] env[62585]: DEBUG nova.compute.utils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 927.826155] env[62585]: DEBUG nova.compute.manager [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 927.826355] env[62585]: DEBUG nova.network.neutron [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 927.876029] env[62585]: DEBUG nova.policy [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac7d82c678d64fba8373930238d5bb2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8121e0a00494834a580b940d36e0160', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 927.945606] env[62585]: DEBUG oslo_vmware.rw_handles [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521ba3da-d8fe-3787-adfd-19e63d872c09/disk-0.vmdk. {{(pid=62585) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 927.945606] env[62585]: INFO nova.virt.vmwareapi.images [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Downloaded image file data 310691a0-fca0-4934-a7c2-2e7b96be6e6c [ 927.946463] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def22adc-c7a8-4b46-84e6-33243ba0a661 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.965581] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-877c0700-f061-4ce1-8dd7-7d0205053392 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.004759] env[62585]: INFO nova.virt.vmwareapi.images [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] The imported VM was unregistered [ 928.005980] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Caching image {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 928.006220] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Creating directory with path [datastore2] devstack-image-cache_base/310691a0-fca0-4934-a7c2-2e7b96be6e6c {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 928.006493] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0301ea17-b26c-4285-b402-f9edd1a7dc2f {{(pid=62585) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.044073] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f1c813-4d9f-433f-a3bf-fb7776929173 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.053846] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e9772d-e510-4ab8-9e60-f08da73b041c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.062465] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Created directory with path [datastore2] devstack-image-cache_base/310691a0-fca0-4934-a7c2-2e7b96be6e6c {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 928.062705] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_38384489-05c2-4e8a-9347-b435afd85d4d/OSTACK_IMG_38384489-05c2-4e8a-9347-b435afd85d4d.vmdk to [datastore2] devstack-image-cache_base/310691a0-fca0-4934-a7c2-2e7b96be6e6c/310691a0-fca0-4934-a7c2-2e7b96be6e6c.vmdk. {{(pid=62585) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 928.086437] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-d27d140c-87ac-4ad8-a8aa-d74dc78b6ffb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.092169] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee05e43-7cc9-49b7-a973-53acc850a914 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.107070] env[62585]: DEBUG oslo_vmware.api [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384939, 'name': PowerOnVM_Task, 'duration_secs': 1.388678} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.110137] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 928.110384] env[62585]: INFO nova.compute.manager [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Took 12.00 seconds to spawn the instance on the hypervisor. 
[ 928.110596] env[62585]: DEBUG nova.compute.manager [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 928.110997] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 928.110997] env[62585]: value = "task-1384948" [ 928.110997] env[62585]: _type = "Task" [ 928.110997] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.111768] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7f092c-e538-4d5f-8220-671557083400 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.115599] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3074cc-bfb6-447c-bce5-cb53aa52b38c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.143162] env[62585]: DEBUG nova.compute.provider_tree [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.144556] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384948, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.161879] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384942, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.613331} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.166734] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 54f542b5-3aba-49d6-a487-62714416b86f/54f542b5-3aba-49d6-a487-62714416b86f.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 928.167049] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 928.168144] env[62585]: DEBUG oslo_vmware.api [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.171416] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-195b7100-79c4-4d6e-88c1-fd53aaef367a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.173658] env[62585]: DEBUG oslo_vmware.api [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384946, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288635} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.173912] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 928.174110] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 928.174323] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 928.174595] env[62585]: INFO nova.compute.manager [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Took 1.20 seconds to destroy the instance on the hypervisor. 
[ 928.174853] env[62585]: DEBUG oslo.service.loopingcall [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 928.175460] env[62585]: DEBUG nova.compute.manager [-] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 928.175551] env[62585]: DEBUG nova.network.neutron [-] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 928.179942] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 928.179942] env[62585]: value = "task-1384949" [ 928.179942] env[62585]: _type = "Task" [ 928.179942] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.190013] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384949, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.283563] env[62585]: DEBUG oslo_concurrency.lockutils [req-5617b89a-581a-4523-8606-4c1f0c442ab1 req-43892217-05df-4e1f-bd4a-0152483b0023 service nova] Releasing lock "refresh_cache-54f542b5-3aba-49d6-a487-62714416b86f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.332791] env[62585]: DEBUG nova.compute.manager [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 928.341756] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d0b8b778-7b1b-4ad1-a373-f3f5adc13368 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "6057e13b-71df-458d-b6ed-c139a8c57836" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 24.031s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.499027] env[62585]: DEBUG nova.network.neutron [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Successfully created port: 6b248ce1-f858-4267-9e57-0d5110b02dc5 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 928.539761] env[62585]: DEBUG nova.network.neutron [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Updating instance_info_cache with network_info: [{"id": "c32c8966-edf9-44a6-9263-00c85e124ab0", "address": "fa:16:3e:9d:f5:11", "network": {"id": "2b85c6b0-fc8f-4275-94c8-9262d8ea21cd", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-609771769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34c6f21d288e47dd94ccbe12526fe4e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc32c8966-ed", "ovs_interfaceid": "c32c8966-edf9-44a6-9263-00c85e124ab0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.627241] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384948, 'name': MoveVirtualDisk_Task} progress is 24%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.650615] env[62585]: DEBUG nova.scheduler.client.report [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 928.659268] env[62585]: INFO nova.compute.manager [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Took 17.41 seconds to build instance. [ 928.668908] env[62585]: DEBUG oslo_vmware.api [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.693022] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384949, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067209} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.693573] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 928.694451] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98bafae6-477f-4013-9f9d-a9031aec4dee {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.722467] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 54f542b5-3aba-49d6-a487-62714416b86f/54f542b5-3aba-49d6-a487-62714416b86f.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.722467] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3fb69bc8-cbda-40f5-a45c-2cba9eb2049b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.752416] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 928.752416] env[62585]: value = "task-1384950" [ 928.752416] env[62585]: _type = "Task" [ 928.752416] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.767878] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384950, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.798711] env[62585]: DEBUG nova.compute.manager [req-77240e6d-5f74-44d6-b678-e9e1d22781e8 req-35e2ef1b-64ce-468c-a30b-c5636054c8ed service nova] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Received event network-vif-deleted-4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 928.799075] env[62585]: INFO nova.compute.manager [req-77240e6d-5f74-44d6-b678-e9e1d22781e8 req-35e2ef1b-64ce-468c-a30b-c5636054c8ed service nova] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Neutron deleted interface 4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5; detaching it from the instance and deleting it from the info cache [ 928.799409] env[62585]: DEBUG nova.network.neutron [req-77240e6d-5f74-44d6-b678-e9e1d22781e8 req-35e2ef1b-64ce-468c-a30b-c5636054c8ed service nova] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.044753] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Releasing lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.127576] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384948, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.157243] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.165186] env[62585]: DEBUG oslo_concurrency.lockutils [None req-abeb6880-eb87-4e5a-8cc4-b97bff5decb2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "3abb84ea-b613-4956-a64f-c4ad230343c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.926s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.172652] env[62585]: DEBUG oslo_vmware.api [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384945, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.188204] env[62585]: INFO nova.scheduler.client.report [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Deleted allocations for instance 70ac6289-2f14-4fb0-a811-97d76cafc532 [ 929.265894] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384950, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.271839] env[62585]: DEBUG nova.network.neutron [-] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.303523] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5419a85-a828-4af8-bf20-09139400b09c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.317747] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f89408-d54e-4cc8-8564-2dbc040e617d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.355218] env[62585]: DEBUG nova.compute.manager [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 929.357629] env[62585]: DEBUG nova.compute.manager [req-77240e6d-5f74-44d6-b678-e9e1d22781e8 req-35e2ef1b-64ce-468c-a30b-c5636054c8ed service nova] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Detach interface failed, port_id=4e6e9eb6-dbec-4b9b-8a07-aa36038c14d5, reason: Instance fcbbc06c-71fa-4891-8bfc-0de746b9e622 could not be found. 
{{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 929.383577] env[62585]: DEBUG nova.virt.hardware [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 929.383880] env[62585]: DEBUG nova.virt.hardware [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 929.384057] env[62585]: DEBUG nova.virt.hardware [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 929.384252] env[62585]: DEBUG nova.virt.hardware [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 929.384422] env[62585]: DEBUG nova.virt.hardware [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 929.384574] env[62585]: DEBUG nova.virt.hardware [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 929.384781] env[62585]: DEBUG nova.virt.hardware [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 929.384942] env[62585]: DEBUG nova.virt.hardware [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 929.385180] env[62585]: DEBUG nova.virt.hardware [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 
tempest-ServersTestJSON-1776640796-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 929.385378] env[62585]: DEBUG nova.virt.hardware [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 929.385588] env[62585]: DEBUG nova.virt.hardware [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 929.386591] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff23b690-29b3-44e8-bdde-bd033de084f5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.390784] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 929.391835] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc4886b-7026-4991-95e1-f24dacb0c664 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.405751] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-946f64e6-13ac-40cc-8935-c0ff73d501eb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.410134] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 929.410490] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d565db20-336a-4d74-8828-af480f9c3f08 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.540585] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 929.540791] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 929.540975] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 
tempest-ServersNegativeTestJSON-310901189-project-member] Deleting the datastore file [datastore2] abf4a205-fcee-46e4-85b6-10a452cc0312 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 929.541436] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1ae1f24-2854-4ddc-8784-5a1b6ba1467f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.552518] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 929.552518] env[62585]: value = "task-1384952" [ 929.552518] env[62585]: _type = "Task" [ 929.552518] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.564951] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384952, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.628687] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384948, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.669140] env[62585]: DEBUG oslo_vmware.api [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384945, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.695939] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8ec7898-f41a-4f82-be1c-9c4d380050da tempest-AttachVolumeNegativeTest-1298414395 tempest-AttachVolumeNegativeTest-1298414395-project-member] Lock "70ac6289-2f14-4fb0-a811-97d76cafc532" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.597s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.729421] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0379328f-a631-4695-99f7-7948ec624322 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "3abb84ea-b613-4956-a64f-c4ad230343c2" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.729831] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0379328f-a631-4695-99f7-7948ec624322 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "3abb84ea-b613-4956-a64f-c4ad230343c2" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.730240] env[62585]: DEBUG nova.compute.manager [None req-0379328f-a631-4695-99f7-7948ec624322 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 929.731302] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485a218d-2431-4f2f-8988-ba98246d306c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.742662] env[62585]: DEBUG nova.compute.manager [None req-0379328f-a631-4695-99f7-7948ec624322 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62585) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 929.743489] env[62585]: DEBUG nova.objects.instance [None req-0379328f-a631-4695-99f7-7948ec624322 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lazy-loading 'flavor' on Instance uuid 3abb84ea-b613-4956-a64f-c4ad230343c2 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.766151] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384950, 'name': ReconfigVM_Task, 'duration_secs': 0.995879} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.766473] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 54f542b5-3aba-49d6-a487-62714416b86f/54f542b5-3aba-49d6-a487-62714416b86f.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 929.767165] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb0ae351-9f91-443c-ba14-d1f37f30b080 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.774027] env[62585]: INFO nova.compute.manager [-] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Took 1.60 seconds to deallocate network for instance. [ 929.780336] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 929.780336] env[62585]: value = "task-1384953" [ 929.780336] env[62585]: _type = "Task" [ 929.780336] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.792050] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384953, 'name': Rename_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.065995] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384952, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.131351] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384948, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.170328] env[62585]: DEBUG oslo_vmware.api [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384945, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.249183] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-0379328f-a631-4695-99f7-7948ec624322 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 930.249491] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04cc038d-fd57-4abb-be29-a8d36b417bc4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.265032] env[62585]: DEBUG oslo_vmware.api [None req-0379328f-a631-4695-99f7-7948ec624322 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 930.265032] env[62585]: value = "task-1384954" [ 930.265032] env[62585]: _type = "Task" [ 930.265032] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.276939] env[62585]: DEBUG oslo_vmware.api [None req-0379328f-a631-4695-99f7-7948ec624322 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384954, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.282069] env[62585]: DEBUG oslo_concurrency.lockutils [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.282354] env[62585]: DEBUG oslo_concurrency.lockutils [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.282600] env[62585]: DEBUG nova.objects.instance [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lazy-loading 'resources' on Instance uuid fcbbc06c-71fa-4891-8bfc-0de746b9e622 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 930.297583] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384953, 'name': Rename_Task, 'duration_secs': 0.391085} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.297910] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 930.298230] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1cf6f6b-8af2-4788-9fb8-b8b942a8682c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.310129] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 930.310129] env[62585]: value = "task-1384955" [ 930.310129] env[62585]: _type = "Task" [ 930.310129] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.323925] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384955, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.413575] env[62585]: DEBUG nova.network.neutron [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Successfully updated port: 6b248ce1-f858-4267-9e57-0d5110b02dc5 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 930.569063] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384952, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.626823] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384948, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.4583} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.627348] env[62585]: INFO nova.virt.vmwareapi.ds_util [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_38384489-05c2-4e8a-9347-b435afd85d4d/OSTACK_IMG_38384489-05c2-4e8a-9347-b435afd85d4d.vmdk to [datastore2] devstack-image-cache_base/310691a0-fca0-4934-a7c2-2e7b96be6e6c/310691a0-fca0-4934-a7c2-2e7b96be6e6c.vmdk. 
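The entries above follow the recurring oslo.vmware pattern used for every datastore operation in this log: a vCenter *_Task method (FileManager.DeleteDatastoreFile_Task, MoveVirtualDisk_Task, ...) is invoked through the API session, and the caller then blocks in wait_for_task while the session polls the task, which is what produces the repeated "progress is N%" and "completed successfully" lines. The following is a minimal illustrative sketch of that pattern using oslo.vmware directly, not the Nova ds_util code itself; the vCenter host, credentials, datastore path and datacenter reference are placeholders (real deployments take these from the [vmware] section of nova.conf), and the numeric retry/poll values are arbitrary.

from oslo_vmware import api


def delete_datastore_file(ds_path, datacenter_ref):
    """Issue DeleteDatastoreFile_Task and block until vCenter finishes it."""
    # Placeholder connection details, not values from this log.
    session = api.VMwareAPISession(
        'vcenter.example.org',           # assumed vCenter host
        'administrator@vsphere.local',   # assumed user
        'secret',                        # assumed password
        api_retry_count=10,
        task_poll_interval=0.5)          # poll cadence behind "progress is N%"
    try:
        file_manager = session.vim.service_content.fileManager
        # Start the asynchronous vCenter task (compare the
        # "Invoking FileManager.DeleteDatastoreFile_Task" entries above).
        task = session.invoke_api(
            session.vim, 'DeleteDatastoreFile_Task', file_manager,
            name=ds_path, datacenter=datacenter_ref)
        # wait_for_task polls the task object at task_poll_interval and raises
        # if vCenter reports an error state; on success it simply returns.
        session.wait_for_task(task)
    finally:
        session.logout()

# Example call; datacenter_ref would be a real Datacenter managed object reference:
#   delete_datastore_file('[datastore2] some_dir', datacenter_ref)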
[ 930.627675] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Cleaning up location [datastore2] OSTACK_IMG_38384489-05c2-4e8a-9347-b435afd85d4d {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 930.627958] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_38384489-05c2-4e8a-9347-b435afd85d4d {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 930.628355] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb65f966-0db0-41d6-b916-3a898b667e44 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.635407] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 930.635407] env[62585]: value = "task-1384957" [ 930.635407] env[62585]: _type = "Task" [ 930.635407] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.644962] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384957, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.667146] env[62585]: DEBUG oslo_vmware.api [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.774065] env[62585]: DEBUG oslo_vmware.api [None req-0379328f-a631-4695-99f7-7948ec624322 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384954, 'name': PowerOffVM_Task, 'duration_secs': 0.355444} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.774065] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-0379328f-a631-4695-99f7-7948ec624322 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 930.774065] env[62585]: DEBUG nova.compute.manager [None req-0379328f-a631-4695-99f7-7948ec624322 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 930.774457] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a504f42-7050-4e1f-84d3-04ecff60ded1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.822382] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384955, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.829188] env[62585]: DEBUG nova.compute.manager [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Received event network-vif-unplugged-c32c8966-edf9-44a6-9263-00c85e124ab0 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 930.829863] env[62585]: DEBUG oslo_concurrency.lockutils [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] Acquiring lock "abf4a205-fcee-46e4-85b6-10a452cc0312-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.829863] env[62585]: DEBUG oslo_concurrency.lockutils [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] Lock "abf4a205-fcee-46e4-85b6-10a452cc0312-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.829998] env[62585]: DEBUG oslo_concurrency.lockutils [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] Lock "abf4a205-fcee-46e4-85b6-10a452cc0312-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.830205] env[62585]: DEBUG nova.compute.manager [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] No waiting events found dispatching network-vif-unplugged-c32c8966-edf9-44a6-9263-00c85e124ab0 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 930.830398] env[62585]: WARNING nova.compute.manager [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: 
abf4a205-fcee-46e4-85b6-10a452cc0312] Received unexpected event network-vif-unplugged-c32c8966-edf9-44a6-9263-00c85e124ab0 for instance with vm_state shelved and task_state shelving_offloading. [ 930.830511] env[62585]: DEBUG nova.compute.manager [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Received event network-changed-c32c8966-edf9-44a6-9263-00c85e124ab0 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 930.830668] env[62585]: DEBUG nova.compute.manager [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Refreshing instance network info cache due to event network-changed-c32c8966-edf9-44a6-9263-00c85e124ab0. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 930.830858] env[62585]: DEBUG oslo_concurrency.lockutils [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] Acquiring lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.830995] env[62585]: DEBUG oslo_concurrency.lockutils [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] Acquired lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.831194] env[62585]: DEBUG nova.network.neutron [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Refreshing network info cache for port c32c8966-edf9-44a6-9263-00c85e124ab0 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.916762] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "refresh_cache-e4edc1dd-52ea-428e-832a-b49d3bc4fe14" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.916906] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "refresh_cache-e4edc1dd-52ea-428e-832a-b49d3bc4fe14" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.917070] env[62585]: DEBUG nova.network.neutron [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 930.949620] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbfe585e-63f6-41a7-b146-d84c986ee321 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.960303] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dfec17e-7f10-4443-b21c-d63e03545ecd {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.992888] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda623f4-c434-41d5-b680-522144903e81 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.000803] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0aea3b-c3af-4c41-85ac-583128aeb37b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.015157] env[62585]: DEBUG nova.compute.provider_tree [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.065108] env[62585]: DEBUG oslo_vmware.api [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384952, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.029535} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.065381] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 931.065572] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 931.065746] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 931.085139] env[62585]: INFO nova.scheduler.client.report [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Deleted allocations for instance abf4a205-fcee-46e4-85b6-10a452cc0312 [ 931.146732] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384957, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036114} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.146994] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 931.147187] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/310691a0-fca0-4934-a7c2-2e7b96be6e6c/310691a0-fca0-4934-a7c2-2e7b96be6e6c.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.147434] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/310691a0-fca0-4934-a7c2-2e7b96be6e6c/310691a0-fca0-4934-a7c2-2e7b96be6e6c.vmdk to [datastore2] b6186aef-8f4c-409a-83aa-1548545ea7c4/b6186aef-8f4c-409a-83aa-1548545ea7c4.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 931.147998] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8b69f97-b921-44a7-bd45-932de213ecdd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.155847] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 931.155847] env[62585]: value = "task-1384958" [ 931.155847] env[62585]: _type = "Task" [ 931.155847] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.166711] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384958, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.169790] env[62585]: DEBUG oslo_vmware.api [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.287367] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0379328f-a631-4695-99f7-7948ec624322 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "3abb84ea-b613-4956-a64f-c4ad230343c2" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.557s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.321504] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384955, 'name': PowerOnVM_Task} progress is 37%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.468522] env[62585]: DEBUG nova.network.neutron [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 931.518769] env[62585]: DEBUG nova.scheduler.client.report [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 931.589726] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.677193] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384958, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.677553] env[62585]: DEBUG oslo_vmware.api [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.693894] env[62585]: DEBUG nova.network.neutron [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Updated VIF entry in instance network info cache for port c32c8966-edf9-44a6-9263-00c85e124ab0. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 931.694286] env[62585]: DEBUG nova.network.neutron [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Updating instance_info_cache with network_info: [{"id": "c32c8966-edf9-44a6-9263-00c85e124ab0", "address": "fa:16:3e:9d:f5:11", "network": {"id": "2b85c6b0-fc8f-4275-94c8-9262d8ea21cd", "bridge": null, "label": "tempest-ServersNegativeTestJSON-609771769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34c6f21d288e47dd94ccbe12526fe4e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapc32c8966-ed", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.698650] env[62585]: DEBUG nova.network.neutron [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Updating instance_info_cache with network_info: [{"id": "6b248ce1-f858-4267-9e57-0d5110b02dc5", "address": "fa:16:3e:c1:74:dc", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b248ce1-f8", "ovs_interfaceid": "6b248ce1-f858-4267-9e57-0d5110b02dc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.825052] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384955, 'name': PowerOnVM_Task} progress is 37%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.025835] env[62585]: DEBUG oslo_concurrency.lockutils [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.742s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.027024] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.437s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.027243] env[62585]: DEBUG nova.objects.instance [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lazy-loading 'resources' on Instance uuid abf4a205-fcee-46e4-85b6-10a452cc0312 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.053024] env[62585]: INFO nova.scheduler.client.report [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Deleted allocations for instance fcbbc06c-71fa-4891-8bfc-0de746b9e622 [ 932.172232] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384958, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.175356] env[62585]: DEBUG oslo_vmware.api [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384945, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.178972] env[62585]: DEBUG oslo_concurrency.lockutils [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "3abb84ea-b613-4956-a64f-c4ad230343c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.178972] env[62585]: DEBUG oslo_concurrency.lockutils [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "3abb84ea-b613-4956-a64f-c4ad230343c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.178972] env[62585]: DEBUG oslo_concurrency.lockutils [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "3abb84ea-b613-4956-a64f-c4ad230343c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.178972] env[62585]: DEBUG oslo_concurrency.lockutils [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "3abb84ea-b613-4956-a64f-c4ad230343c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.179244] env[62585]: DEBUG oslo_concurrency.lockutils [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "3abb84ea-b613-4956-a64f-c4ad230343c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.181378] env[62585]: INFO nova.compute.manager [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Terminating instance [ 932.183645] env[62585]: DEBUG nova.compute.manager [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 932.183869] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 932.184729] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886576e7-7024-4191-b472-6138e918a6ae {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.195020] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 932.195347] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3649e19e-a8c7-4195-a2f6-c586b6f9cf8d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.197691] env[62585]: DEBUG oslo_concurrency.lockutils [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] Releasing lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.197691] env[62585]: DEBUG nova.compute.manager [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Received event network-vif-plugged-6b248ce1-f858-4267-9e57-0d5110b02dc5 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 932.197857] env[62585]: DEBUG oslo_concurrency.lockutils [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] Acquiring lock "e4edc1dd-52ea-428e-832a-b49d3bc4fe14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.197954] env[62585]: DEBUG oslo_concurrency.lockutils [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] Lock "e4edc1dd-52ea-428e-832a-b49d3bc4fe14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.198147] env[62585]: DEBUG oslo_concurrency.lockutils [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] Lock "e4edc1dd-52ea-428e-832a-b49d3bc4fe14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.198322] env[62585]: DEBUG nova.compute.manager [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] No waiting events found dispatching network-vif-plugged-6b248ce1-f858-4267-9e57-0d5110b02dc5 {{(pid=62585) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 932.198489] env[62585]: WARNING nova.compute.manager [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Received unexpected event network-vif-plugged-6b248ce1-f858-4267-9e57-0d5110b02dc5 for instance with vm_state building and task_state spawning. [ 932.198656] env[62585]: DEBUG nova.compute.manager [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Received event network-changed-6b248ce1-f858-4267-9e57-0d5110b02dc5 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 932.198812] env[62585]: DEBUG nova.compute.manager [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Refreshing instance network info cache due to event network-changed-6b248ce1-f858-4267-9e57-0d5110b02dc5. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 932.199085] env[62585]: DEBUG oslo_concurrency.lockutils [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] Acquiring lock "refresh_cache-e4edc1dd-52ea-428e-832a-b49d3bc4fe14" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.201550] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "refresh_cache-e4edc1dd-52ea-428e-832a-b49d3bc4fe14" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.201835] env[62585]: DEBUG nova.compute.manager [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Instance network_info: |[{"id": "6b248ce1-f858-4267-9e57-0d5110b02dc5", "address": "fa:16:3e:c1:74:dc", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b248ce1-f8", "ovs_interfaceid": "6b248ce1-f858-4267-9e57-0d5110b02dc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 932.202124] env[62585]: DEBUG oslo_concurrency.lockutils [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] Acquired lock 
"refresh_cache-e4edc1dd-52ea-428e-832a-b49d3bc4fe14" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.202328] env[62585]: DEBUG nova.network.neutron [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Refreshing network info cache for port 6b248ce1-f858-4267-9e57-0d5110b02dc5 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 932.203528] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:74:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40859343-2baa-45fd-88e3-ebf8aaed2b19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b248ce1-f858-4267-9e57-0d5110b02dc5', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 932.211022] env[62585]: DEBUG oslo.service.loopingcall [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 932.214260] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 932.214829] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd5600ae-34a2-4093-a80b-714e32f81cb6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.239041] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 932.239041] env[62585]: value = "task-1384960" [ 932.239041] env[62585]: _type = "Task" [ 932.239041] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.250188] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384960, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.286021] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 932.286021] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 932.286021] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Deleting the datastore file [datastore2] 3abb84ea-b613-4956-a64f-c4ad230343c2 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 932.286021] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0dd6737-c9f5-4bc7-a651-2e1ae58e1ed7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.298935] env[62585]: DEBUG oslo_vmware.api [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 932.298935] env[62585]: value = "task-1384961" [ 932.298935] env[62585]: _type = "Task" [ 932.298935] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.316085] env[62585]: DEBUG oslo_vmware.api [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384961, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.328945] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384955, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.499910] env[62585]: DEBUG nova.network.neutron [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Updated VIF entry in instance network info cache for port 6b248ce1-f858-4267-9e57-0d5110b02dc5. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 932.500414] env[62585]: DEBUG nova.network.neutron [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Updating instance_info_cache with network_info: [{"id": "6b248ce1-f858-4267-9e57-0d5110b02dc5", "address": "fa:16:3e:c1:74:dc", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b248ce1-f8", "ovs_interfaceid": "6b248ce1-f858-4267-9e57-0d5110b02dc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.530366] env[62585]: DEBUG nova.objects.instance [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lazy-loading 'numa_topology' on Instance uuid abf4a205-fcee-46e4-85b6-10a452cc0312 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.561235] env[62585]: DEBUG oslo_concurrency.lockutils [None req-401f5a01-1bba-44a3-8086-d1884c51dabd tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "fcbbc06c-71fa-4891-8bfc-0de746b9e622" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.589s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.673382] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384958, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.677020] env[62585]: DEBUG oslo_vmware.api [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.754143] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384960, 'name': CreateVM_Task, 'duration_secs': 0.42827} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.754510] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 932.755090] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.755332] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.755700] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 932.755980] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d82f6fe8-255c-4627-b1c1-584e03c126a3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.763405] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 932.763405] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52902e35-1173-da54-4c22-9f8c563dfdd0" [ 932.763405] env[62585]: _type = "Task" [ 932.763405] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.774407] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52902e35-1173-da54-4c22-9f8c563dfdd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.811837] env[62585]: DEBUG oslo_vmware.api [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384961, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.829621] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384955, 'name': PowerOnVM_Task} progress is 73%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.003987] env[62585]: DEBUG oslo_concurrency.lockutils [req-f4d60765-3f9f-471d-8297-8b5fd320e1b7 req-a5d4e9c5-ae95-47cf-b39b-ecd0fee45583 service nova] Releasing lock "refresh_cache-e4edc1dd-52ea-428e-832a-b49d3bc4fe14" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.034179] env[62585]: DEBUG nova.objects.base [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62585) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 933.173265] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384958, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.176895] env[62585]: DEBUG oslo_vmware.api [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384945, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.187647] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "abf4a205-fcee-46e4-85b6-10a452cc0312" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.211476] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac03123-84c6-4098-a252-6a6848e0a844 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.223231] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8032e74-4069-4126-8642-cc26f2fd73ca {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.257786] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ef502c-f1f7-48d6-b249-06c816b86524 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.272237] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93025d7d-beea-47ba-8e9c-a070e79b9906 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.281879] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52902e35-1173-da54-4c22-9f8c563dfdd0, 'name': SearchDatastore_Task, 'duration_secs': 0.014405} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.282741] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.282996] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.283321] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.283506] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.283748] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.284160] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b311d6c0-2f37-449f-abff-cf8222361d44 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.299703] env[62585]: DEBUG nova.compute.provider_tree [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.303701] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.303965] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 933.308870] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46fe2f04-8613-46fb-9b76-189ef65531de {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.325438] env[62585]: DEBUG oslo_vmware.api [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384961, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.327669] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 933.327669] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e29026-b83f-3a70-b042-19a21c634a80" [ 933.327669] env[62585]: _type = "Task" [ 933.327669] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.337112] env[62585]: DEBUG oslo_vmware.api [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384955, 'name': PowerOnVM_Task, 'duration_secs': 2.833798} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.340595] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 933.340848] env[62585]: INFO nova.compute.manager [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Took 12.40 seconds to spawn the instance on the hypervisor. [ 933.340907] env[62585]: DEBUG nova.compute.manager [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 933.341198] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e29026-b83f-3a70-b042-19a21c634a80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.341990] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec94615-5382-4cef-9675-d931e33e133d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.667882] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384958, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.322527} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.670745] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/310691a0-fca0-4934-a7c2-2e7b96be6e6c/310691a0-fca0-4934-a7c2-2e7b96be6e6c.vmdk to [datastore2] b6186aef-8f4c-409a-83aa-1548545ea7c4/b6186aef-8f4c-409a-83aa-1548545ea7c4.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 933.671500] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c65c56a-cd75-410a-8a06-24ed791876ae {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.678880] env[62585]: DEBUG oslo_vmware.api [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384945, 'name': ReconfigVM_Task, 'duration_secs': 5.893078} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.687773] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.687989] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Reconfigured VM to detach interface {{(pid=62585) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 933.690167] env[62585]: DEBUG oslo_concurrency.lockutils [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "2cf85b78-df04-40d0-a7db-5e8979574d0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.690438] env[62585]: DEBUG oslo_concurrency.lockutils [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "2cf85b78-df04-40d0-a7db-5e8979574d0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.690669] env[62585]: DEBUG oslo_concurrency.lockutils [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "2cf85b78-df04-40d0-a7db-5e8979574d0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.690881] env[62585]: DEBUG oslo_concurrency.lockutils [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "2cf85b78-df04-40d0-a7db-5e8979574d0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.691080] env[62585]: DEBUG oslo_concurrency.lockutils [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "2cf85b78-df04-40d0-a7db-5e8979574d0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.700649] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] 
b6186aef-8f4c-409a-83aa-1548545ea7c4/b6186aef-8f4c-409a-83aa-1548545ea7c4.vmdk or device None with type streamOptimized {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 933.701193] env[62585]: INFO nova.compute.manager [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Terminating instance [ 933.702569] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5abd33f9-e156-40d2-b208-073d0db10b30 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.717175] env[62585]: DEBUG nova.compute.manager [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 933.717385] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 933.718723] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e25539b-ea24-4511-84bd-e4e284bc2b7f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.727221] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 933.727530] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-757b162c-b6f3-4368-a935-ad9b14f5f9a1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.730067] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 933.730067] env[62585]: value = "task-1384962" [ 933.730067] env[62585]: _type = "Task" [ 933.730067] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.734442] env[62585]: DEBUG oslo_vmware.api [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 933.734442] env[62585]: value = "task-1384963" [ 933.734442] env[62585]: _type = "Task" [ 933.734442] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.740749] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384962, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.747635] env[62585]: DEBUG oslo_vmware.api [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384963, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.806504] env[62585]: DEBUG nova.scheduler.client.report [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 933.819707] env[62585]: DEBUG oslo_vmware.api [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1384961, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.22145} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.819836] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 933.820388] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 933.820591] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 933.820765] env[62585]: INFO nova.compute.manager [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Took 1.64 seconds to destroy the instance on the hypervisor. [ 933.821011] env[62585]: DEBUG oslo.service.loopingcall [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 933.821930] env[62585]: DEBUG nova.compute.manager [-] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 933.822040] env[62585]: DEBUG nova.network.neutron [-] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 933.839318] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e29026-b83f-3a70-b042-19a21c634a80, 'name': SearchDatastore_Task, 'duration_secs': 0.019091} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.840183] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46d1b51a-ab8e-4435-9412-17935792795d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.846985] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 933.846985] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52956704-56dd-f1c0-5e61-0e8bd5b0fcd5" [ 933.846985] env[62585]: _type = "Task" [ 933.846985] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.870758] env[62585]: INFO nova.compute.manager [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Took 20.70 seconds to build instance. [ 933.873648] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52956704-56dd-f1c0-5e61-0e8bd5b0fcd5, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.875901] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.876285] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] e4edc1dd-52ea-428e-832a-b49d3bc4fe14/e4edc1dd-52ea-428e-832a-b49d3bc4fe14.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 933.876436] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02e97a1c-9168-4d20-8cce-a4c907139aec {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.885923] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 933.885923] env[62585]: value = "task-1384964" [ 933.885923] env[62585]: _type = "Task" [ 933.885923] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.898264] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384964, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.253343] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384962, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.257052] env[62585]: DEBUG oslo_vmware.api [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384963, 'name': PowerOffVM_Task, 'duration_secs': 0.445859} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.257451] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 934.257671] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 934.257985] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d50b8d3-e35d-45ef-9a3b-0c1dd0a6811c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.311716] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.284s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.374915] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7e509361-7aeb-4b22-a86e-bf8415a278ca tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "54f542b5-3aba-49d6-a487-62714416b86f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.225s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.400301] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384964, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.402106] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 934.402393] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 934.402649] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Deleting the datastore file [datastore1] 2cf85b78-df04-40d0-a7db-5e8979574d0a {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 934.402966] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a8b4dc28-15b1-45b3-907a-07222b7ef9ad {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.412952] env[62585]: DEBUG oslo_vmware.api [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 934.412952] env[62585]: value = "task-1384966" [ 934.412952] env[62585]: _type = "Task" [ 934.412952] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.423975] env[62585]: DEBUG oslo_vmware.api [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384966, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.663379] env[62585]: DEBUG nova.compute.manager [req-e32595d9-e0c5-4a82-811a-c9a58ad29c7c req-1c4a99ed-48a5-46b5-891d-67e07282a350 service nova] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Received event network-vif-deleted-1f9fc1f2-9662-4b22-be29-c0eb753bfbe5 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 934.663901] env[62585]: INFO nova.compute.manager [req-e32595d9-e0c5-4a82-811a-c9a58ad29c7c req-1c4a99ed-48a5-46b5-891d-67e07282a350 service nova] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Neutron deleted interface 1f9fc1f2-9662-4b22-be29-c0eb753bfbe5; detaching it from the instance and deleting it from the info cache [ 934.663901] env[62585]: DEBUG nova.network.neutron [req-e32595d9-e0c5-4a82-811a-c9a58ad29c7c req-1c4a99ed-48a5-46b5-891d-67e07282a350 service nova] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.747257] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384962, 'name': ReconfigVM_Task, 'duration_secs': 0.581142} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.747257] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Reconfigured VM instance instance-00000059 to attach disk [datastore2] b6186aef-8f4c-409a-83aa-1548545ea7c4/b6186aef-8f4c-409a-83aa-1548545ea7c4.vmdk or device None with type streamOptimized {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 934.747257] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-216acbf9-589c-48b6-9510-913f30e2c451 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.757037] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 934.757037] env[62585]: value = "task-1384967" [ 934.757037] env[62585]: _type = "Task" [ 934.757037] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.768349] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384967, 'name': Rename_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.831166] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4f37d661-de01-41be-a437-fcf9e5029a84 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "abf4a205-fcee-46e4-85b6-10a452cc0312" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 26.303s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.831166] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "abf4a205-fcee-46e4-85b6-10a452cc0312" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.642s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.831166] env[62585]: INFO nova.compute.manager [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Unshelving [ 934.902021] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384964, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600405} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.902021] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] e4edc1dd-52ea-428e-832a-b49d3bc4fe14/e4edc1dd-52ea-428e-832a-b49d3bc4fe14.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 934.902021] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 934.902021] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9e03a5c-b166-43f3-9775-367a211bab5a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.908743] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 934.908743] env[62585]: value = "task-1384968" [ 934.908743] env[62585]: _type = "Task" [ 934.908743] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.919553] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384968, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.925823] env[62585]: DEBUG oslo_vmware.api [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384966, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.056203] env[62585]: DEBUG nova.network.neutron [-] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.170916] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d647d0cd-6292-43d0-bd4a-2e81245f59c2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.181400] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ba2c27-49ed-4416-9907-66fe435cfc04 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.211382] env[62585]: DEBUG nova.compute.manager [req-e32595d9-e0c5-4a82-811a-c9a58ad29c7c req-1c4a99ed-48a5-46b5-891d-67e07282a350 service nova] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Detach interface failed, port_id=1f9fc1f2-9662-4b22-be29-c0eb753bfbe5, reason: Instance 3abb84ea-b613-4956-a64f-c4ad230343c2 could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 935.265384] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384967, 'name': Rename_Task, 'duration_secs': 0.348961} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.265671] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 935.265925] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ee0364d-6d5e-460f-87eb-b7eac24be545 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.273571] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 935.273571] env[62585]: value = "task-1384969" [ 935.273571] env[62585]: _type = "Task" [ 935.273571] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.283873] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384969, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.422795] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384968, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.174102} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.425822] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 935.426173] env[62585]: DEBUG oslo_vmware.api [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384966, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.661662} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.426918] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58cf936-49c4-4148-b9cc-f87398a68551 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.429360] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 935.429494] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 935.429625] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 935.429805] env[62585]: INFO nova.compute.manager [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Took 1.71 seconds to destroy the instance on the hypervisor. [ 935.430096] env[62585]: DEBUG oslo.service.loopingcall [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 935.430271] env[62585]: DEBUG nova.compute.manager [-] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 935.430366] env[62585]: DEBUG nova.network.neutron [-] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 935.453054] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] e4edc1dd-52ea-428e-832a-b49d3bc4fe14/e4edc1dd-52ea-428e-832a-b49d3bc4fe14.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 935.453434] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73d40972-7603-4679-bff8-266e0394836f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.476317] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 935.476317] env[62585]: value = "task-1384970" [ 935.476317] env[62585]: _type = "Task" [ 935.476317] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.486055] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384970, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.513013] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "ddb1103d-a846-4229-b441-de45424b4ec9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.513444] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "ddb1103d-a846-4229-b441-de45424b4ec9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.513607] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "ddb1103d-a846-4229-b441-de45424b4ec9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.513820] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "ddb1103d-a846-4229-b441-de45424b4ec9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.514046] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "ddb1103d-a846-4229-b441-de45424b4ec9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.517051] env[62585]: INFO nova.compute.manager [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Terminating instance [ 935.522010] env[62585]: DEBUG nova.compute.manager [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 935.522281] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 935.523209] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7601a4e9-4af0-41f7-94b0-d5514edd1f9f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.538166] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 935.538166] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8151fd1a-ac5e-47bc-b856-ae2200af5209 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.546721] env[62585]: DEBUG oslo_vmware.api [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 935.546721] env[62585]: value = "task-1384971" [ 935.546721] env[62585]: _type = "Task" [ 935.546721] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.558881] env[62585]: INFO nova.compute.manager [-] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Took 1.74 seconds to deallocate network for instance. [ 935.558881] env[62585]: DEBUG oslo_vmware.api [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384971, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.574112] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.574112] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquired lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.574112] env[62585]: DEBUG nova.network.neutron [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 935.785736] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384969, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.863019] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.863019] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.863019] env[62585]: DEBUG nova.objects.instance [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lazy-loading 'pci_requests' on Instance uuid abf4a205-fcee-46e4-85b6-10a452cc0312 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.987769] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384970, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.034228] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "54f542b5-3aba-49d6-a487-62714416b86f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.034602] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "54f542b5-3aba-49d6-a487-62714416b86f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.034933] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "54f542b5-3aba-49d6-a487-62714416b86f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.035083] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "54f542b5-3aba-49d6-a487-62714416b86f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.035294] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "54f542b5-3aba-49d6-a487-62714416b86f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.037862] env[62585]: INFO nova.compute.manager [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Terminating instance [ 936.040301] env[62585]: DEBUG nova.compute.manager [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 936.040612] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 936.041829] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a3c684-0bbe-4b2d-a444-6d25a655a91c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.053810] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 936.054441] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-548ef2b8-5c43-49fa-91ea-724ee4ec66f7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.059460] env[62585]: DEBUG oslo_vmware.api [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384971, 'name': PowerOffVM_Task, 'duration_secs': 0.325809} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.059735] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 936.059912] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 936.060631] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-42621aa3-2199-47f3-aa93-5d9d3a5c0a51 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.062818] env[62585]: DEBUG oslo_vmware.api [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 936.062818] env[62585]: value = "task-1384972" [ 936.062818] env[62585]: _type = "Task" [ 936.062818] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.066789] env[62585]: DEBUG oslo_concurrency.lockutils [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.073783] env[62585]: DEBUG oslo_vmware.api [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384972, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.166561] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 936.166561] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 936.166561] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Deleting the datastore file [datastore2] ddb1103d-a846-4229-b441-de45424b4ec9 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 936.166561] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa75b6bb-e1c8-416a-bb07-dfd5798cad2f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.174213] env[62585]: DEBUG oslo_vmware.api [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 936.174213] env[62585]: value = "task-1384974" [ 936.174213] env[62585]: _type = "Task" [ 936.174213] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.204397] env[62585]: DEBUG oslo_vmware.api [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384974, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.285964] env[62585]: DEBUG oslo_vmware.api [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384969, 'name': PowerOnVM_Task, 'duration_secs': 0.85211} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.288469] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 936.288677] env[62585]: INFO nova.compute.manager [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Took 17.76 seconds to spawn the instance on the hypervisor. [ 936.288861] env[62585]: DEBUG nova.compute.manager [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 936.289209] env[62585]: DEBUG nova.network.neutron [-] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.290827] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1df1601-9fa9-4b12-9193-d941a6045871 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.365703] env[62585]: DEBUG nova.objects.instance [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lazy-loading 'numa_topology' on Instance uuid abf4a205-fcee-46e4-85b6-10a452cc0312 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 936.407728] env[62585]: INFO nova.network.neutron [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Port 4b5af1c5-20c1-446c-aad5-023ac683f7e8 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
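The records above repeatedly show oslo.vmware's invoke-then-poll pattern: a task method such as PowerOffVM_Task or DeleteDatastoreFile_Task is invoked, and wait_for_task then polls it until the "completed successfully" line appears. A minimal standalone sketch of that pattern, using only the public oslo.vmware API (the vCenter host, credentials and VM name below are placeholders, not values from this log):

# Illustrative sketch only: the "invoke a *_Task method, then poll it" pattern
# visible in the records above. Host, credentials and the VM name are
# placeholders, not values taken from this log.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vcenter.example.org',            # placeholder host
    'administrator@vsphere.local',    # placeholder user
    'secret',                         # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)

# Find a VM managed object reference by name (placeholder name).
vm_ref = None
retrieve_result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                     'VirtualMachine', 100, ['name'])
with vim_util.WithRetrieval(session.vim, retrieve_result) as objects:
    for obj in objects:
        if obj.propSet[0].val == 'example-vm':
            vm_ref = obj.obj
            break

if vm_ref is not None:
    # Kick off the asynchronous power-off, then block until vCenter reports the
    # task as done -- the PowerOffVM_Task / wait_for_task sequence that produces
    # the "progress is 0%" and "completed successfully" lines in the log.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

session.logout()

DeleteDatastoreFile_Task and PowerOnVM_Task in the surrounding records follow the same invoke/wait shape; only the method name and managed-object arguments change.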
[ 936.408227] env[62585]: DEBUG nova.network.neutron [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Updating instance_info_cache with network_info: [{"id": "b29379d9-a516-40cd-b7f0-35505b917bcb", "address": "fa:16:3e:1c:50:ee", "network": {"id": "19cc8f17-f362-4186-901c-3dc61c1ef3e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1660902429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48929b5f0c2c41ddade223ab57002fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb29379d9-a5", "ovs_interfaceid": "b29379d9-a516-40cd-b7f0-35505b917bcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.487795] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384970, 'name': ReconfigVM_Task, 'duration_secs': 0.519101} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.488129] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Reconfigured VM instance instance-0000005b to attach disk [datastore1] e4edc1dd-52ea-428e-832a-b49d3bc4fe14/e4edc1dd-52ea-428e-832a-b49d3bc4fe14.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 936.488769] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1fdcb40-620a-49ef-9979-a4931829d72e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.495814] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 936.495814] env[62585]: value = "task-1384975" [ 936.495814] env[62585]: _type = "Task" [ 936.495814] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.503639] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384975, 'name': Rename_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.573243] env[62585]: DEBUG oslo_vmware.api [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384972, 'name': PowerOffVM_Task, 'duration_secs': 0.200257} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.573477] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 936.573649] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 936.573904] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-278a4e09-6f57-498a-8e1e-42cfb02c2114 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.666609] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 936.666836] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 936.667034] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Deleting the datastore file [datastore2] 54f542b5-3aba-49d6-a487-62714416b86f {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 936.667377] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa9ffdba-82a9-4363-ae85-11cdc14bd82e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.675047] env[62585]: DEBUG oslo_vmware.api [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for the task: (returnval){ [ 936.675047] env[62585]: value = "task-1384977" [ 936.675047] env[62585]: _type = "Task" [ 936.675047] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.687815] env[62585]: DEBUG oslo_vmware.api [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384974, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167379} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.692094] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 936.692318] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 936.692505] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 936.692677] env[62585]: INFO nova.compute.manager [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Took 1.17 seconds to destroy the instance on the hypervisor. [ 936.692913] env[62585]: DEBUG oslo.service.loopingcall [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 936.693146] env[62585]: DEBUG oslo_vmware.api [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384977, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.694177] env[62585]: DEBUG nova.compute.manager [req-ba5a5d30-60b3-49eb-950f-39802a9c0e2c req-15aef2ad-4d58-41de-8b0a-6686560c4144 service nova] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Received event network-vif-deleted-57d68249-852c-488a-ac40-8de77dec7712 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 936.694724] env[62585]: DEBUG nova.compute.manager [-] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 936.694854] env[62585]: DEBUG nova.network.neutron [-] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 936.796000] env[62585]: INFO nova.compute.manager [-] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Took 1.37 seconds to deallocate network for instance. [ 936.816705] env[62585]: INFO nova.compute.manager [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Took 23.93 seconds to build instance. [ 936.867856] env[62585]: INFO nova.compute.claims [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 936.913115] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Releasing lock "refresh_cache-ddb1103d-a846-4229-b441-de45424b4ec9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.007889] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384975, 'name': Rename_Task, 'duration_secs': 0.235857} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.007889] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 937.007889] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b938eae8-162b-4c88-be64-5356045f6a63 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.017060] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 937.017060] env[62585]: value = "task-1384978" [ 937.017060] env[62585]: _type = "Task" [ 937.017060] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.024097] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384978, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.188723] env[62585]: DEBUG oslo_vmware.api [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Task: {'id': task-1384977, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153672} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.188987] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 937.189325] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 937.189571] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 937.189732] env[62585]: INFO nova.compute.manager [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 937.189923] env[62585]: DEBUG oslo.service.loopingcall [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 937.190138] env[62585]: DEBUG nova.compute.manager [-] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 937.190247] env[62585]: DEBUG nova.network.neutron [-] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 937.303021] env[62585]: DEBUG oslo_concurrency.lockutils [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.322372] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b8ee1bad-a70e-46d3-9452-1bbd2177c79b tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "b6186aef-8f4c-409a-83aa-1548545ea7c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.441s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.418459] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b051a57-c870-44b5-81f8-a5a2d59d2e95 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "interface-ddb1103d-a846-4229-b441-de45424b4ec9-4b5af1c5-20c1-446c-aad5-023ac683f7e8" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.392s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.524999] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384978, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.531751] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "b6186aef-8f4c-409a-83aa-1548545ea7c4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.532010] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "b6186aef-8f4c-409a-83aa-1548545ea7c4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.532231] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "b6186aef-8f4c-409a-83aa-1548545ea7c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.532434] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "b6186aef-8f4c-409a-83aa-1548545ea7c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.532585] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "b6186aef-8f4c-409a-83aa-1548545ea7c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.535812] env[62585]: INFO nova.compute.manager [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Terminating instance [ 937.537684] env[62585]: DEBUG nova.compute.manager [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 937.537888] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 937.538696] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec4c3cdc-dee4-4ab3-86c8-ed8cc30e1924 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.552272] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 937.552542] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9bdb4417-4b91-4c8a-8a8e-5b51003765c4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.561367] env[62585]: DEBUG oslo_vmware.api [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 937.561367] env[62585]: value = "task-1384979" [ 937.561367] env[62585]: _type = "Task" [ 937.561367] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.570848] env[62585]: DEBUG oslo_vmware.api [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384979, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.025791] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384978, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.038616] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a29d27-09e7-4eba-b850-f309fddb868a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.047407] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51cab270-50ac-493f-8215-3d35d25f4340 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.084657] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86773d80-2ad7-4ca5-9048-b7645c82ac01 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.093052] env[62585]: DEBUG oslo_vmware.api [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384979, 'name': PowerOffVM_Task, 'duration_secs': 0.210181} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.095242] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 938.095425] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 938.095702] env[62585]: DEBUG nova.network.neutron [-] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.096894] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aadf472a-e5fd-4f4c-a29d-17bc294cfe23 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.101025] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7662539-ac40-47c4-921c-04eab83c619f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.115497] env[62585]: DEBUG nova.compute.provider_tree [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.174982] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 938.175251] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 938.175449] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Deleting the datastore file [datastore2] b6186aef-8f4c-409a-83aa-1548545ea7c4 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 938.175728] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0445d0b8-747a-4ded-84ba-f4e8aa8171e5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.183319] env[62585]: DEBUG oslo_vmware.api [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 938.183319] env[62585]: value = "task-1384981" 
[ 938.183319] env[62585]: _type = "Task" [ 938.183319] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.196017] env[62585]: DEBUG oslo_vmware.api [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384981, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.218626] env[62585]: DEBUG nova.network.neutron [-] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.525292] env[62585]: DEBUG oslo_vmware.api [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1384978, 'name': PowerOnVM_Task, 'duration_secs': 1.096486} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.525567] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 938.525775] env[62585]: INFO nova.compute.manager [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Took 9.17 seconds to spawn the instance on the hypervisor. [ 938.525959] env[62585]: DEBUG nova.compute.manager [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 938.526833] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716bd077-eac1-4dda-9a6d-62f0bd7bcd43 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.604420] env[62585]: INFO nova.compute.manager [-] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Took 1.41 seconds to deallocate network for instance. 
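The "Waiting for function ... to return" DEBUG lines in these records are emitted from oslo.service's loopingcall module (loopingcall.py in the traceback location). The sketch below is a generic looping-call example, not Nova's actual retry wrapper: the work function, interval and retry limit are placeholder choices that only illustrate how a loop is driven until LoopingCallDone is raised.

# Generic oslo.service looping-call sketch (not Nova's code): a work function
# is retried on a fixed interval until it raises LoopingCallDone. The retry
# limit and interval are arbitrary illustrative values.
from oslo_service import loopingcall

state = {'attempts': 0}

def do_cleanup():
    """Placeholder work; pretend it succeeds on the second attempt."""
    if state['attempts'] < 2:
        raise RuntimeError('transient failure')

def _work_with_retries():
    state['attempts'] += 1
    try:
        do_cleanup()
    except RuntimeError:
        if state['attempts'] < 3:
            return                        # let the loop call us again
        raise                             # give up; wait() re-raises
    raise loopingcall.LoopingCallDone()   # success: stop the loop

timer = loopingcall.FixedIntervalLoopingCall(_work_with_retries)
# start() returns an event; wait() blocks until LoopingCallDone stops the loop,
# which is the point at which the "Waiting for function ..." wait resolves.
timer.start(interval=1).wait()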
[ 938.618485] env[62585]: DEBUG nova.scheduler.client.report [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 938.693834] env[62585]: DEBUG oslo_vmware.api [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384981, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136325} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.694155] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 938.694353] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 938.694537] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 938.694717] env[62585]: INFO nova.compute.manager [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Took 1.16 seconds to destroy the instance on the hypervisor. [ 938.694959] env[62585]: DEBUG oslo.service.loopingcall [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 938.695184] env[62585]: DEBUG nova.compute.manager [-] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 938.695277] env[62585]: DEBUG nova.network.neutron [-] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 938.716384] env[62585]: DEBUG nova.compute.manager [req-1f18abe6-9fa4-4270-9b23-2ba9b6201abe req-6a98755f-93cc-4061-9387-7c8a69371893 service nova] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Received event network-vif-deleted-52550d14-1f84-4991-83fc-b68ce7d6200f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 938.716593] env[62585]: DEBUG nova.compute.manager [req-1f18abe6-9fa4-4270-9b23-2ba9b6201abe req-6a98755f-93cc-4061-9387-7c8a69371893 service nova] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Received event network-vif-deleted-b29379d9-a516-40cd-b7f0-35505b917bcb {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 938.721649] env[62585]: INFO nova.compute.manager [-] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Took 2.03 seconds to deallocate network for instance. [ 939.042911] env[62585]: INFO nova.compute.manager [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Took 14.05 seconds to build instance. [ 939.111727] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.124034] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.261s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.125547] env[62585]: DEBUG oslo_concurrency.lockutils [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.059s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.125837] env[62585]: DEBUG nova.objects.instance [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lazy-loading 'resources' on Instance uuid 3abb84ea-b613-4956-a64f-c4ad230343c2 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 939.165401] env[62585]: INFO nova.network.neutron [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] 
Updating port c32c8966-edf9-44a6-9263-00c85e124ab0 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 939.228384] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.412015] env[62585]: DEBUG nova.network.neutron [-] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.544956] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fd2f3ad0-60b3-4a30-a5ba-116ff8da717d tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "e4edc1dd-52ea-428e-832a-b49d3bc4fe14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.564s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.767229] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053c122f-5fef-4d30-9a2f-8fd9dfbae30b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.776227] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f8e5a6e-9f5d-4587-a640-f29d4448f616 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.809985] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e37eb6-9b34-4bd3-83d3-bddb433cdb7e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.818480] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d14a844-a694-46ea-a5ad-adc0cf1d1bbd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.833135] env[62585]: DEBUG nova.compute.provider_tree [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.914822] env[62585]: INFO nova.compute.manager [-] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Took 1.22 seconds to deallocate network for instance. 
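The recurring Acquiring lock "compute_resources" / acquired / "released" records come from oslo.concurrency's lockutils wrapper around the resource tracker's claim and usage updates. A minimal sketch of that decorator pattern, with a toy class standing in for the real ResourceTracker (its fields and method bodies are illustrative only):

# Minimal oslo.concurrency sketch of the synchronized-decorator pattern behind
# the 'Acquiring lock "compute_resources"' records. ToyTracker and its
# bookkeeping are illustrative placeholders, not Nova's ResourceTracker.
from oslo_concurrency import lockutils

class ToyTracker(object):
    def __init__(self):
        self.claims = {}

    @lockutils.synchronized('compute_resources')
    def instance_claim(self, instance_uuid, vcpus):
        # Only one caller at a time may mutate the shared bookkeeping; the
        # wrapper logs acquire/wait/release lines like those in the records
        # when DEBUG logging is enabled.
        self.claims[instance_uuid] = vcpus

    @lockutils.synchronized('compute_resources')
    def update_usage(self, instance_uuid):
        self.claims.pop(instance_uuid, None)

tracker = ToyTracker()
tracker.instance_claim('00000000-0000-0000-0000-000000000000', 1)
tracker.update_usage('00000000-0000-0000-0000-000000000000')

The "waited N s" figures in the log (for example the 3.059s wait on "compute_resources" at 939.125547) are simply the time a caller spent blocked in this wrapper before entering the critical section.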
[ 940.336649] env[62585]: DEBUG nova.scheduler.client.report [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 940.376324] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "c8f23f36-b035-467e-959a-37fc0b6462ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.376324] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "c8f23f36-b035-467e-959a-37fc0b6462ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.422032] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.747683] env[62585]: DEBUG nova.compute.manager [req-d8d62529-da00-4aae-94d9-f4253400b863 req-a2b53ba1-c1b7-4bc8-a856-5b602d9c9731 service nova] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Received event network-vif-deleted-5fe6ba89-b6bd-4982-bac0-2c93b5697204 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 940.747938] env[62585]: DEBUG nova.compute.manager [req-d8d62529-da00-4aae-94d9-f4253400b863 req-a2b53ba1-c1b7-4bc8-a856-5b602d9c9731 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Received event network-vif-plugged-c32c8966-edf9-44a6-9263-00c85e124ab0 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 940.748195] env[62585]: DEBUG oslo_concurrency.lockutils [req-d8d62529-da00-4aae-94d9-f4253400b863 req-a2b53ba1-c1b7-4bc8-a856-5b602d9c9731 service nova] Acquiring lock "abf4a205-fcee-46e4-85b6-10a452cc0312-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.748470] env[62585]: DEBUG oslo_concurrency.lockutils [req-d8d62529-da00-4aae-94d9-f4253400b863 req-a2b53ba1-c1b7-4bc8-a856-5b602d9c9731 service nova] Lock "abf4a205-fcee-46e4-85b6-10a452cc0312-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.748680] env[62585]: DEBUG oslo_concurrency.lockutils [req-d8d62529-da00-4aae-94d9-f4253400b863 req-a2b53ba1-c1b7-4bc8-a856-5b602d9c9731 service nova] Lock "abf4a205-fcee-46e4-85b6-10a452cc0312-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.748878] env[62585]: DEBUG nova.compute.manager [req-d8d62529-da00-4aae-94d9-f4253400b863 req-a2b53ba1-c1b7-4bc8-a856-5b602d9c9731 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] No waiting events found dispatching network-vif-plugged-c32c8966-edf9-44a6-9263-00c85e124ab0 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 940.749106] env[62585]: WARNING nova.compute.manager [req-d8d62529-da00-4aae-94d9-f4253400b863 req-a2b53ba1-c1b7-4bc8-a856-5b602d9c9731 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Received unexpected event network-vif-plugged-c32c8966-edf9-44a6-9263-00c85e124ab0 for instance with vm_state shelved_offloaded and task_state spawning. [ 940.765930] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.766145] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquired lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.766354] env[62585]: DEBUG nova.network.neutron [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 940.843025] env[62585]: DEBUG oslo_concurrency.lockutils [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.717s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.845087] env[62585]: DEBUG oslo_concurrency.lockutils [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.542s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.845326] env[62585]: DEBUG nova.objects.instance [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lazy-loading 'resources' on Instance uuid 2cf85b78-df04-40d0-a7db-5e8979574d0a {{(pid=62585) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 940.865778] env[62585]: INFO nova.scheduler.client.report [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Deleted allocations for instance 3abb84ea-b613-4956-a64f-c4ad230343c2 [ 940.877996] env[62585]: DEBUG nova.compute.manager [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 941.373306] env[62585]: DEBUG oslo_concurrency.lockutils [None req-63c7e021-84bb-405d-b6ba-40db67a3327e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "3abb84ea-b613-4956-a64f-c4ad230343c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.195s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.395216] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.487800] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b612096-189d-4a07-bf54-c447148be973 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.496361] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc1f977-87ee-4e44-b859-958150489220 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.526593] env[62585]: DEBUG nova.network.neutron [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Updating instance_info_cache with network_info: [{"id": "c32c8966-edf9-44a6-9263-00c85e124ab0", "address": "fa:16:3e:9d:f5:11", "network": {"id": "2b85c6b0-fc8f-4275-94c8-9262d8ea21cd", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-609771769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34c6f21d288e47dd94ccbe12526fe4e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc32c8966-ed", "ovs_interfaceid": "c32c8966-edf9-44a6-9263-00c85e124ab0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.529022] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7616569e-619b-4d62-8d9a-c2d4a2f6854b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.536309] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f5a409-67c8-4037-b0c0-80bd174b558b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.550335] env[62585]: DEBUG nova.compute.provider_tree [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.034261] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Releasing lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.053361] env[62585]: DEBUG nova.scheduler.client.report [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 942.058995] env[62585]: DEBUG nova.virt.hardware [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='1020dbc2cdc11d6b8091548439ff1d5d',container_format='bare',created_at=2024-10-31T10:06:06Z,direct_url=,disk_format='vmdk',id=0a472987-1439-41cb-99c4-bd857ec93b02,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-498840906-shelved',owner='34c6f21d288e47dd94ccbe12526fe4e8',properties=ImageMetaProps,protected=,size=31667200,status='active',tags=,updated_at=2024-10-31T10:06:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 942.059281] env[62585]: DEBUG nova.virt.hardware [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 942.059470] env[62585]: DEBUG nova.virt.hardware [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.059682] env[62585]: DEBUG nova.virt.hardware [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 942.059854] env[62585]: DEBUG nova.virt.hardware [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.060040] env[62585]: DEBUG nova.virt.hardware [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 942.060388] env[62585]: DEBUG nova.virt.hardware [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 942.060589] env[62585]: DEBUG nova.virt.hardware [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 942.060788] env[62585]: DEBUG nova.virt.hardware [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 942.061338] env[62585]: DEBUG nova.virt.hardware [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 942.061338] env[62585]: DEBUG nova.virt.hardware [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 942.062260] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecb04ee-b26a-4da2-aa36-4cd077f002af {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.071794] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1c6232-4c9e-4109-a731-02ed59b8440b {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.086762] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:f5:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98e21102-8954-4f6f-b1e6-5d764a53aa22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c32c8966-edf9-44a6-9263-00c85e124ab0', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 942.094106] env[62585]: DEBUG oslo.service.loopingcall [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 942.094387] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 942.094944] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4a964f90-f0de-4e01-baf1-cba79a3fdd85 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.116655] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 942.116655] env[62585]: value = "task-1384982" [ 942.116655] env[62585]: _type = "Task" [ 942.116655] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.124877] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384982, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.517029] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "a26fb190-e6e6-48ab-a1d6-c662421a965f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.517029] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "a26fb190-e6e6-48ab-a1d6-c662421a965f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.566548] env[62585]: DEBUG oslo_concurrency.lockutils [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.721s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.569052] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.457s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.569326] env[62585]: DEBUG nova.objects.instance [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lazy-loading 'resources' on Instance uuid 54f542b5-3aba-49d6-a487-62714416b86f {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.590366] env[62585]: INFO nova.scheduler.client.report [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Deleted allocations for instance 2cf85b78-df04-40d0-a7db-5e8979574d0a [ 942.627440] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1384982, 'name': CreateVM_Task, 'duration_secs': 0.347895} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.627643] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 942.628317] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/0a472987-1439-41cb-99c4-bd857ec93b02" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.628497] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquired lock "[datastore1] devstack-image-cache_base/0a472987-1439-41cb-99c4-bd857ec93b02" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.628873] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/0a472987-1439-41cb-99c4-bd857ec93b02" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 942.629143] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b3b4f16-37d5-4772-9e20-b15a28700a4d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.634962] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 942.634962] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]528c5381-130a-cd90-2979-61ecc460b6b2" [ 942.634962] env[62585]: _type = "Task" [ 942.634962] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.643685] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]528c5381-130a-cd90-2979-61ecc460b6b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.777385] env[62585]: DEBUG nova.compute.manager [req-12faabaf-616e-413a-abb6-9b4d89219726 req-1304d13e-c51a-49c2-8001-af748840e4c8 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Received event network-changed-c32c8966-edf9-44a6-9263-00c85e124ab0 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 942.777528] env[62585]: DEBUG nova.compute.manager [req-12faabaf-616e-413a-abb6-9b4d89219726 req-1304d13e-c51a-49c2-8001-af748840e4c8 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Refreshing instance network info cache due to event network-changed-c32c8966-edf9-44a6-9263-00c85e124ab0. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 942.777762] env[62585]: DEBUG oslo_concurrency.lockutils [req-12faabaf-616e-413a-abb6-9b4d89219726 req-1304d13e-c51a-49c2-8001-af748840e4c8 service nova] Acquiring lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.777853] env[62585]: DEBUG oslo_concurrency.lockutils [req-12faabaf-616e-413a-abb6-9b4d89219726 req-1304d13e-c51a-49c2-8001-af748840e4c8 service nova] Acquired lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.778166] env[62585]: DEBUG nova.network.neutron [req-12faabaf-616e-413a-abb6-9b4d89219726 req-1304d13e-c51a-49c2-8001-af748840e4c8 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Refreshing network info cache for port c32c8966-edf9-44a6-9263-00c85e124ab0 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 943.018320] env[62585]: DEBUG nova.compute.manager [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 943.097877] env[62585]: DEBUG oslo_concurrency.lockutils [None req-da96c42d-5aaf-433e-bfee-b3871d9851c8 tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "2cf85b78-df04-40d0-a7db-5e8979574d0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.407s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.149089] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Releasing lock "[datastore1] devstack-image-cache_base/0a472987-1439-41cb-99c4-bd857ec93b02" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.149362] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Processing image 0a472987-1439-41cb-99c4-bd857ec93b02 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 943.149653] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/0a472987-1439-41cb-99c4-bd857ec93b02/0a472987-1439-41cb-99c4-bd857ec93b02.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.149746] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquired lock "[datastore1] devstack-image-cache_base/0a472987-1439-41cb-99c4-bd857ec93b02/0a472987-1439-41cb-99c4-bd857ec93b02.vmdk" {{(pid=62585) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.149922] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 943.150180] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4cbbccfc-b5d8-4da3-9603-69043a742408 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.162472] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 943.162678] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 943.165663] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e103fbcd-8d4e-4793-b6d3-561ecfcb6ef9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.174040] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 943.174040] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]521c6699-1896-4869-f1ca-993216b0f506" [ 943.174040] env[62585]: _type = "Task" [ 943.174040] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.184693] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]521c6699-1896-4869-f1ca-993216b0f506, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.226690] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7de283b-0ce6-446c-870d-73ed9fe2e36c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.235234] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c751485-e4da-44d0-8e27-200cebda6718 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.266600] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f34ce5b3-4fec-4a31-af48-0265e83db16b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.275369] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf7df67d-f0b7-4465-9d8d-17ad407a85ea {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.290577] env[62585]: DEBUG nova.compute.provider_tree [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.481551] env[62585]: DEBUG nova.network.neutron [req-12faabaf-616e-413a-abb6-9b4d89219726 req-1304d13e-c51a-49c2-8001-af748840e4c8 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Updated VIF entry in instance network info cache for port c32c8966-edf9-44a6-9263-00c85e124ab0. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 943.482051] env[62585]: DEBUG nova.network.neutron [req-12faabaf-616e-413a-abb6-9b4d89219726 req-1304d13e-c51a-49c2-8001-af748840e4c8 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Updating instance_info_cache with network_info: [{"id": "c32c8966-edf9-44a6-9263-00c85e124ab0", "address": "fa:16:3e:9d:f5:11", "network": {"id": "2b85c6b0-fc8f-4275-94c8-9262d8ea21cd", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-609771769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34c6f21d288e47dd94ccbe12526fe4e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc32c8966-ed", "ovs_interfaceid": "c32c8966-edf9-44a6-9263-00c85e124ab0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.538356] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.628500] env[62585]: DEBUG oslo_concurrency.lockutils [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "4b080cc3-e1cc-4b64-9926-c37b891444f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.628731] env[62585]: DEBUG oslo_concurrency.lockutils [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "4b080cc3-e1cc-4b64-9926-c37b891444f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.628947] env[62585]: DEBUG oslo_concurrency.lockutils [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "4b080cc3-e1cc-4b64-9926-c37b891444f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.629145] env[62585]: DEBUG oslo_concurrency.lockutils [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f 
tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "4b080cc3-e1cc-4b64-9926-c37b891444f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.629316] env[62585]: DEBUG oslo_concurrency.lockutils [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "4b080cc3-e1cc-4b64-9926-c37b891444f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.631590] env[62585]: INFO nova.compute.manager [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Terminating instance [ 943.633370] env[62585]: DEBUG nova.compute.manager [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 943.633570] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 943.634410] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff6d426-4c77-4e01-bf13-10eee4194ed1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.642272] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.642497] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac8f4eae-8c35-46a8-90d8-a204a72cbc5c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.650708] env[62585]: DEBUG oslo_vmware.api [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 943.650708] env[62585]: value = "task-1384983" [ 943.650708] env[62585]: _type = "Task" [ 943.650708] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.658734] env[62585]: DEBUG oslo_vmware.api [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384983, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.686021] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Preparing fetch location {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 943.686021] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Fetch image to [datastore1] OSTACK_IMG_365c5004-b2ed-407a-a5e7-d6796955ff6c/OSTACK_IMG_365c5004-b2ed-407a-a5e7-d6796955ff6c.vmdk {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 943.686021] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Downloading stream optimized image 0a472987-1439-41cb-99c4-bd857ec93b02 to [datastore1] OSTACK_IMG_365c5004-b2ed-407a-a5e7-d6796955ff6c/OSTACK_IMG_365c5004-b2ed-407a-a5e7-d6796955ff6c.vmdk on the data store datastore1 as vApp {{(pid=62585) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 943.686021] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Downloading image file data 0a472987-1439-41cb-99c4-bd857ec93b02 to the ESX as VM named 'OSTACK_IMG_365c5004-b2ed-407a-a5e7-d6796955ff6c' {{(pid=62585) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 943.766980] env[62585]: DEBUG oslo_vmware.rw_handles [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 943.766980] env[62585]: value = "resgroup-9" [ 943.766980] env[62585]: _type = "ResourcePool" [ 943.766980] env[62585]: }. 
{{(pid=62585) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 943.767674] env[62585]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b2130c82-81d3-4761-816f-f57c4f2a5478 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.794786] env[62585]: DEBUG nova.scheduler.client.report [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 943.799964] env[62585]: DEBUG oslo_vmware.rw_handles [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lease: (returnval){ [ 943.799964] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5208d402-0f05-b41b-d70b-da5849c0ca61" [ 943.799964] env[62585]: _type = "HttpNfcLease" [ 943.799964] env[62585]: } obtained for vApp import into resource pool (val){ [ 943.799964] env[62585]: value = "resgroup-9" [ 943.799964] env[62585]: _type = "ResourcePool" [ 943.799964] env[62585]: }. {{(pid=62585) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 943.800579] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the lease: (returnval){ [ 943.800579] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5208d402-0f05-b41b-d70b-da5849c0ca61" [ 943.800579] env[62585]: _type = "HttpNfcLease" [ 943.800579] env[62585]: } to be ready. {{(pid=62585) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 943.808216] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 943.808216] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5208d402-0f05-b41b-d70b-da5849c0ca61" [ 943.808216] env[62585]: _type = "HttpNfcLease" [ 943.808216] env[62585]: } is initializing. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 943.984545] env[62585]: DEBUG oslo_concurrency.lockutils [req-12faabaf-616e-413a-abb6-9b4d89219726 req-1304d13e-c51a-49c2-8001-af748840e4c8 service nova] Releasing lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.161371] env[62585]: DEBUG oslo_vmware.api [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384983, 'name': PowerOffVM_Task, 'duration_secs': 0.198267} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.161667] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 944.161843] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 944.162120] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4c4c419-9add-4d79-a1b1-62539f85c841 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.231894] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 944.232164] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 944.232358] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Deleting the datastore file [datastore1] 4b080cc3-e1cc-4b64-9926-c37b891444f5 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 944.232643] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cee269ad-bb2c-435e-a7fb-b6f945f363fc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.238898] env[62585]: DEBUG oslo_vmware.api [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for the task: (returnval){ [ 944.238898] env[62585]: value = "task-1384986" [ 944.238898] env[62585]: _type = "Task" [ 944.238898] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.247543] env[62585]: DEBUG oslo_vmware.api [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384986, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.305432] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.736s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.308045] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.079s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.308045] env[62585]: DEBUG nova.objects.instance [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lazy-loading 'resources' on Instance uuid ddb1103d-a846-4229-b441-de45424b4ec9 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.313880] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 944.313880] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5208d402-0f05-b41b-d70b-da5849c0ca61" [ 944.313880] env[62585]: _type = "HttpNfcLease" [ 944.313880] env[62585]: } is initializing. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 944.326423] env[62585]: INFO nova.scheduler.client.report [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Deleted allocations for instance 54f542b5-3aba-49d6-a487-62714416b86f [ 944.749759] env[62585]: DEBUG oslo_vmware.api [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Task: {'id': task-1384986, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220127} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.753632] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 944.753632] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 944.753632] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 944.753632] env[62585]: INFO nova.compute.manager [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Took 1.12 seconds to destroy the instance on the hypervisor. [ 944.753632] env[62585]: DEBUG oslo.service.loopingcall [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.753632] env[62585]: DEBUG nova.compute.manager [-] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 944.753632] env[62585]: DEBUG nova.network.neutron [-] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 944.809502] env[62585]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 944.809502] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5208d402-0f05-b41b-d70b-da5849c0ca61" [ 944.809502] env[62585]: _type = "HttpNfcLease" [ 944.809502] env[62585]: } is ready. {{(pid=62585) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 944.809815] env[62585]: DEBUG oslo_vmware.rw_handles [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 944.809815] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5208d402-0f05-b41b-d70b-da5849c0ca61" [ 944.809815] env[62585]: _type = "HttpNfcLease" [ 944.809815] env[62585]: }. 
{{(pid=62585) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 944.812960] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37c35bc-d96c-4cc7-bcae-cc54cf53c279 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.822254] env[62585]: DEBUG oslo_vmware.rw_handles [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5236cef1-e729-6e66-fff1-1223b02271d6/disk-0.vmdk from lease info. {{(pid=62585) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 944.822254] env[62585]: DEBUG oslo_vmware.rw_handles [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Creating HTTP connection to write to file with size = 31667200 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5236cef1-e729-6e66-fff1-1223b02271d6/disk-0.vmdk. {{(pid=62585) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 944.887824] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6c0b41c5-0d68-47cf-92f5-132747132f76 tempest-ServerDiskConfigTestJSON-792514113 tempest-ServerDiskConfigTestJSON-792514113-project-member] Lock "54f542b5-3aba-49d6-a487-62714416b86f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.853s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.891752] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-467ae5cd-6bd4-43bc-b986-33343b4e633f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.015612] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e9aab3-9b0d-4d9d-b6d6-89d6af98ca67 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.024015] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1a4b5f-bb8f-4815-90d8-d07cf51978c8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.059973] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3694fe5b-57de-45ed-93a6-7e5eae9608fd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.069506] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9adfa5ab-c03f-45b5-a71e-7bd3fd91503d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.086531] env[62585]: DEBUG nova.compute.provider_tree [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.117842] 
env[62585]: DEBUG nova.compute.manager [req-27a37351-82c3-46c6-8225-a59d339a907f req-e1135f35-02ad-4d99-8657-ffbd7b0b64c9 service nova] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Received event network-vif-deleted-144b0acd-f385-4e1c-b8cc-8396bf7f7648 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.118071] env[62585]: INFO nova.compute.manager [req-27a37351-82c3-46c6-8225-a59d339a907f req-e1135f35-02ad-4d99-8657-ffbd7b0b64c9 service nova] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Neutron deleted interface 144b0acd-f385-4e1c-b8cc-8396bf7f7648; detaching it from the instance and deleting it from the info cache [ 945.118262] env[62585]: DEBUG nova.network.neutron [req-27a37351-82c3-46c6-8225-a59d339a907f req-e1135f35-02ad-4d99-8657-ffbd7b0b64c9 service nova] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.590328] env[62585]: DEBUG nova.scheduler.client.report [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 945.594920] env[62585]: DEBUG nova.network.neutron [-] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.622317] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aae2efd4-fb51-4bcd-82bb-ad9812bf7150 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.632230] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65699db-2d70-4516-ba8a-01ce003a6ce0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.659195] env[62585]: DEBUG nova.compute.manager [req-27a37351-82c3-46c6-8225-a59d339a907f req-e1135f35-02ad-4d99-8657-ffbd7b0b64c9 service nova] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Detach interface failed, port_id=144b0acd-f385-4e1c-b8cc-8396bf7f7648, reason: Instance 4b080cc3-e1cc-4b64-9926-c37b891444f5 could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 946.001882] env[62585]: DEBUG oslo_vmware.rw_handles [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Completed reading data from the image iterator. 
{{(pid=62585) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 946.002192] env[62585]: DEBUG oslo_vmware.rw_handles [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5236cef1-e729-6e66-fff1-1223b02271d6/disk-0.vmdk. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 946.003164] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9436c0-aef7-4ec1-bccc-a22febee8219 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.011937] env[62585]: DEBUG oslo_vmware.rw_handles [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5236cef1-e729-6e66-fff1-1223b02271d6/disk-0.vmdk is in state: ready. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 946.011937] env[62585]: DEBUG oslo_vmware.rw_handles [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5236cef1-e729-6e66-fff1-1223b02271d6/disk-0.vmdk. {{(pid=62585) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 946.011937] env[62585]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-61d6dc59-459a-4851-a89e-aa809358c63e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.099124] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.791s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.101661] env[62585]: INFO nova.compute.manager [-] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Took 1.35 seconds to deallocate network for instance. 
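Note: the oslo_vmware.rw_handles records above and below trace the tail end of the stream-optimized image import for instance abf4a205-fcee-46e4-85b6-10a452cc0312: once the image iterator is drained, the write handle checks the HttpNfcLease state, marks the lease complete, and then closes the VMDK write handle. The following is a minimal sketch of that release step, assuming an already-established oslo.vmware VMwareAPISession bound to `session` and the lease managed-object reference bound to `lease` (both names are illustrative); it mirrors the sequence visible in the log rather than reproducing Nova's or oslo.vmware's actual implementation.

    # Sketch only: the lease-release sequence suggested by the log records above.
    from oslo_vmware import vim_util

    def release_import_lease(session, lease, percent_done=100):
        """Report final progress and complete an HttpNfcLease after a VMDK upload."""
        # "Invoking HttpNfcLease.HttpNfcLeaseProgress" -- report transfer progress
        # so vCenter does not time the lease out ('percent' is the vSphere API
        # parameter name for this call).
        session.invoke_api(session.vim, 'HttpNfcLeaseProgress', lease,
                           percent=int(percent_done))

        # "Getting lease state for <vmdk url>." -- read the lease's 'state' property.
        state = session.invoke_api(vim_util, 'get_object_property',
                                   session.vim, lease, 'state')

        if state == 'ready':
            # "Releasing lease for <vmdk url>." -- tell vCenter the upload finished.
            session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)
        else:
            # Any other state means the transfer did not finish cleanly; the
            # error details live in the lease's 'error' property.
            raise RuntimeError('HttpNfcLease is in unexpected state: %s' % state)

In the log these calls appear to be driven from the write handle's close() path, which is what produces the "Closed VMDK write handle" record that follows; the same request ID (req-22d1024f-eb44-4201-9c06-db5fd8f0649c) ties the whole import together.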
[ 946.105186] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.680s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.105186] env[62585]: DEBUG nova.objects.instance [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lazy-loading 'resources' on Instance uuid b6186aef-8f4c-409a-83aa-1548545ea7c4 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.136267] env[62585]: INFO nova.scheduler.client.report [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Deleted allocations for instance ddb1103d-a846-4229-b441-de45424b4ec9 [ 946.186034] env[62585]: DEBUG oslo_vmware.rw_handles [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5236cef1-e729-6e66-fff1-1223b02271d6/disk-0.vmdk. {{(pid=62585) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 946.186175] env[62585]: INFO nova.virt.vmwareapi.images [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Downloaded image file data 0a472987-1439-41cb-99c4-bd857ec93b02 [ 946.187202] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ce7069-31a9-421d-9303-4a0d038c3434 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.205570] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3654c085-5855-437b-8e38-f8c99cde07b4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.231638] env[62585]: INFO nova.virt.vmwareapi.images [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] The imported VM was unregistered [ 946.234184] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Caching image {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 946.234622] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Creating directory with path [datastore1] devstack-image-cache_base/0a472987-1439-41cb-99c4-bd857ec93b02 {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 946.235227] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b55e612-67e9-40ab-ae8e-1dc156eff651 {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.245626] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Created directory with path [datastore1] devstack-image-cache_base/0a472987-1439-41cb-99c4-bd857ec93b02 {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 946.245828] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_365c5004-b2ed-407a-a5e7-d6796955ff6c/OSTACK_IMG_365c5004-b2ed-407a-a5e7-d6796955ff6c.vmdk to [datastore1] devstack-image-cache_base/0a472987-1439-41cb-99c4-bd857ec93b02/0a472987-1439-41cb-99c4-bd857ec93b02.vmdk. {{(pid=62585) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 946.246104] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-7234f952-9aff-4df2-877a-67e1f68e106b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.252937] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 946.252937] env[62585]: value = "task-1384988" [ 946.252937] env[62585]: _type = "Task" [ 946.252937] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.260802] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384988, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.611277] env[62585]: DEBUG oslo_concurrency.lockutils [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.646854] env[62585]: DEBUG oslo_concurrency.lockutils [None req-ed1af746-1f30-40e4-a782-baa0078fafc7 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "ddb1103d-a846-4229-b441-de45424b4ec9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.133s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.828525] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0249161-6cc3-49e9-b2b3-fb99339e7522 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.828525] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00fea5a9-1f80-4128-b739-049b014e868d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.828525] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384988, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.828525] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7145d04-fbf2-40c8-9489-a4d029ffe60d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.828525] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c983cf-c64e-4636-b45a-3647cb594290 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.828525] env[62585]: DEBUG nova.compute.provider_tree [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.265308] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384988, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.322455] env[62585]: DEBUG nova.scheduler.client.report [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 947.661228] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "a634a80e-d90a-4ce3-8233-75657a7754be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.662202] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "a634a80e-d90a-4ce3-8233-75657a7754be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.662202] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "a634a80e-d90a-4ce3-8233-75657a7754be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.662202] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "a634a80e-d90a-4ce3-8233-75657a7754be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.662202] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "a634a80e-d90a-4ce3-8233-75657a7754be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.664726] env[62585]: INFO nova.compute.manager [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Terminating instance [ 947.666609] env[62585]: DEBUG nova.compute.manager [None req-b6928cc6-3737-4edb-9b03-de3af777f300 
tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 947.666814] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 947.667682] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6b76b9-396d-4539-9521-0dd928f1972d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.675846] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 947.676154] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c20cbc74-f7a1-4b9c-9e66-20c3f091662f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.683069] env[62585]: DEBUG oslo_vmware.api [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 947.683069] env[62585]: value = "task-1384989" [ 947.683069] env[62585]: _type = "Task" [ 947.683069] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.691739] env[62585]: DEBUG oslo_vmware.api [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384989, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.766298] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384988, 'name': MoveVirtualDisk_Task} progress is 63%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.830511] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.728s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.833064] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.438s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.834719] env[62585]: INFO nova.compute.claims [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 947.851167] env[62585]: INFO nova.scheduler.client.report [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Deleted allocations for instance b6186aef-8f4c-409a-83aa-1548545ea7c4 [ 948.108020] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Acquiring lock "65ed4088-2cc5-4c00-94af-f714ec608fd8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.108300] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Lock "65ed4088-2cc5-4c00-94af-f714ec608fd8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.193710] env[62585]: DEBUG oslo_vmware.api [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384989, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.266606] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384988, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.359654] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6bcf844-9284-429d-95c5-2dca6e10fc90 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "b6186aef-8f4c-409a-83aa-1548545ea7c4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.827s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.611473] env[62585]: DEBUG nova.compute.manager [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 948.695220] env[62585]: DEBUG oslo_vmware.api [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384989, 'name': PowerOffVM_Task, 'duration_secs': 0.935282} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.695536] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 948.695669] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 948.695912] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d062053e-851b-4e1b-b28e-10e1b73451b7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.767451] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384988, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.409978} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.767707] env[62585]: INFO nova.virt.vmwareapi.ds_util [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_365c5004-b2ed-407a-a5e7-d6796955ff6c/OSTACK_IMG_365c5004-b2ed-407a-a5e7-d6796955ff6c.vmdk to [datastore1] devstack-image-cache_base/0a472987-1439-41cb-99c4-bd857ec93b02/0a472987-1439-41cb-99c4-bd857ec93b02.vmdk. 
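The MoveVirtualDisk_Task above follows the standard vSphere task-polling pattern: submit the task, then repeatedly re-read its task info (the progress values 0% → 21% → 43% → 63% → 85% in the log) until vCenter reports a terminal state, here after roughly 2.41 s. The sketch below mirrors that loop in plain Python as an illustration only; submit_move_disk and get_task_info are hypothetical placeholders for the underlying SOAP calls, not oslo.vmware APIs.

import time

# Hypothetical stand-ins for the SOAP calls made through the vSphere API;
# they are placeholders for this sketch, not oslo.vmware functions.
def submit_move_disk(src_path, dst_path):
    """Pretend to invoke VirtualDiskManager.MoveVirtualDisk_Task and return a task id."""
    return "task-1384988"

def get_task_info(task_id):
    """Pretend to read the task's state and progress from vCenter."""
    return {"state": "success", "progress": 100}

def wait_for_task(task_id, poll_interval=0.5, timeout=300):
    """Poll a task until it reaches a terminal state, mirroring the log entries above."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        print(f"Task: {task_id} progress is {info['progress']}%")
        if info["state"] in ("success", "error"):
            return info
        time.sleep(poll_interval)
    raise TimeoutError(f"{task_id} did not complete within {timeout}s")

task = submit_move_disk("[datastore1] src.vmdk", "[datastore1] dst.vmdk")
wait_for_task(task)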
[ 948.767900] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Cleaning up location [datastore1] OSTACK_IMG_365c5004-b2ed-407a-a5e7-d6796955ff6c {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 948.768082] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_365c5004-b2ed-407a-a5e7-d6796955ff6c {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 948.768344] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76bebb22-3be7-42d5-9653-d1c9e1ee22a2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.774539] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 948.774539] env[62585]: value = "task-1384991" [ 948.774539] env[62585]: _type = "Task" [ 948.774539] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.782097] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384991, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.785869] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 948.786088] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 948.786476] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Deleting the datastore file [datastore1] a634a80e-d90a-4ce3-8233-75657a7754be {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 948.786554] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64752743-fe84-47a0-94d9-3a1d3264e411 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.793596] env[62585]: DEBUG oslo_vmware.api [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for the task: (returnval){ [ 948.793596] env[62585]: value = "task-1384992" [ 948.793596] env[62585]: _type = "Task" [ 948.793596] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.801079] env[62585]: DEBUG oslo_vmware.api [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384992, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.958778] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b31c7132-4047-4423-8acb-084028ca225f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.966182] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefc4681-ab95-4e51-9c81-8b23acfe7602 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.997742] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d87f97-bb25-4083-9d98-d465edf1c0a3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.005459] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8321da7-c82f-4235-8968-7753ebf6c280 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.018881] env[62585]: DEBUG nova.compute.provider_tree [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.131167] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.285465] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384991, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217679} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.285812] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.285968] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Releasing lock "[datastore1] devstack-image-cache_base/0a472987-1439-41cb-99c4-bd857ec93b02/0a472987-1439-41cb-99c4-bd857ec93b02.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.286305] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/0a472987-1439-41cb-99c4-bd857ec93b02/0a472987-1439-41cb-99c4-bd857ec93b02.vmdk to [datastore1] abf4a205-fcee-46e4-85b6-10a452cc0312/abf4a205-fcee-46e4-85b6-10a452cc0312.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 949.286592] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3e03243-a968-4f9a-aada-b3a59b6e98f6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.293494] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 949.293494] env[62585]: value = "task-1384993" [ 949.293494] env[62585]: _type = "Task" [ 949.293494] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.303843] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384993, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.307019] env[62585]: DEBUG oslo_vmware.api [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384992, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.523430] env[62585]: DEBUG nova.scheduler.client.report [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 949.546114] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "891e5a42-3681-47eb-ac88-015fa21a6580" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.546488] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "891e5a42-3681-47eb-ac88-015fa21a6580" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.546750] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "891e5a42-3681-47eb-ac88-015fa21a6580-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.546986] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "891e5a42-3681-47eb-ac88-015fa21a6580-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.547232] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "891e5a42-3681-47eb-ac88-015fa21a6580-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.551329] env[62585]: INFO nova.compute.manager [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Terminating instance [ 949.553769] env[62585]: DEBUG nova.compute.manager [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 
891e5a42-3681-47eb-ac88-015fa21a6580] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 949.554046] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 949.554980] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82075dec-f7f2-4420-9eca-c34ae97a87a1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.564062] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 949.564306] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d42473c4-2580-4a67-b4bc-5e4f76e0ce6c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.572032] env[62585]: DEBUG oslo_vmware.api [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 949.572032] env[62585]: value = "task-1384994" [ 949.572032] env[62585]: _type = "Task" [ 949.572032] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.579582] env[62585]: DEBUG oslo_vmware.api [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384994, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.807089] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384993, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.810277] env[62585]: DEBUG oslo_vmware.api [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Task: {'id': task-1384992, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.636706} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.810542] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.810735] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 949.810918] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 949.811153] env[62585]: INFO nova.compute.manager [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Took 2.14 seconds to destroy the instance on the hypervisor. [ 949.811417] env[62585]: DEBUG oslo.service.loopingcall [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.811639] env[62585]: DEBUG nova.compute.manager [-] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 949.811722] env[62585]: DEBUG nova.network.neutron [-] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 949.990135] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "b0885bdd-bc8d-4311-8388-54bdc22144c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.990442] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "b0885bdd-bc8d-4311-8388-54bdc22144c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.027470] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.194s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.027996] env[62585]: DEBUG nova.compute.manager [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 950.030957] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.493s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.032437] env[62585]: INFO nova.compute.claims [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.085293] env[62585]: DEBUG oslo_vmware.api [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384994, 'name': PowerOffVM_Task, 'duration_secs': 0.261987} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.085603] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 950.085777] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 950.086312] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de499551-85cb-4bcb-b0e7-3dd70db5243c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.097937] env[62585]: DEBUG nova.compute.manager [req-ac6e5377-4bf2-4527-96b9-32242a24a6be req-c558d303-4422-4ed6-a9f2-65c4aab4e5d0 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Received event network-vif-deleted-840822b3-e947-451f-90bf-03eafebebf95 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 950.098217] env[62585]: INFO nova.compute.manager [req-ac6e5377-4bf2-4527-96b9-32242a24a6be req-c558d303-4422-4ed6-a9f2-65c4aab4e5d0 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Neutron deleted interface 840822b3-e947-451f-90bf-03eafebebf95; detaching it from the instance and deleting it from the info cache [ 950.098338] env[62585]: DEBUG nova.network.neutron [req-ac6e5377-4bf2-4527-96b9-32242a24a6be req-c558d303-4422-4ed6-a9f2-65c4aab4e5d0 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.156416] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 950.156777] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 950.156930] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Deleting the datastore file [datastore2] 891e5a42-3681-47eb-ac88-015fa21a6580 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 950.157132] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0414e204-7ece-46d5-87b6-377b22b13286 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.164060] env[62585]: DEBUG oslo_vmware.api [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 
tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 950.164060] env[62585]: value = "task-1384996" [ 950.164060] env[62585]: _type = "Task" [ 950.164060] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.175799] env[62585]: DEBUG oslo_vmware.api [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384996, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.304332] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384993, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.493499] env[62585]: DEBUG nova.compute.manager [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 950.537046] env[62585]: DEBUG nova.compute.utils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 950.541802] env[62585]: DEBUG nova.compute.manager [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 950.541802] env[62585]: DEBUG nova.network.neutron [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 950.579544] env[62585]: DEBUG nova.network.neutron [-] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.597202] env[62585]: DEBUG nova.policy [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac7d82c678d64fba8373930238d5bb2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8121e0a00494834a580b940d36e0160', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 950.601296] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33c166e4-3217-45b6-b315-038e249f0a82 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.611649] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919e1326-d5c5-4ebd-93d5-39bd4b4614bb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.637466] env[62585]: DEBUG nova.compute.manager [req-ac6e5377-4bf2-4527-96b9-32242a24a6be req-c558d303-4422-4ed6-a9f2-65c4aab4e5d0 service nova] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Detach interface failed, port_id=840822b3-e947-451f-90bf-03eafebebf95, reason: Instance a634a80e-d90a-4ce3-8233-75657a7754be could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 950.674397] env[62585]: DEBUG oslo_vmware.api [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1384996, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208517} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.674670] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 950.674858] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 950.675048] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 950.675834] env[62585]: INFO nova.compute.manager [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Took 1.12 seconds to destroy the instance on the hypervisor. [ 950.675834] env[62585]: DEBUG oslo.service.loopingcall [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 950.675834] env[62585]: DEBUG nova.compute.manager [-] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 950.675834] env[62585]: DEBUG nova.network.neutron [-] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 950.805448] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384993, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.023153] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.042113] env[62585]: DEBUG nova.compute.manager [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 951.082380] env[62585]: INFO nova.compute.manager [-] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Took 1.27 seconds to deallocate network for instance. 
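For instance a634a80e-d90a-4ce3-8233-75657a7754be the log shows a fixed teardown order: power off the VM, unregister it from vCenter, delete its datastore directory, then deallocate its Neutron ports. The outline below only illustrates that ordering under those assumptions; every helper name is a placeholder, not an actual nova.virt.vmwareapi method.

# Illustrative outline of the teardown order seen in the log above.
# Each helper is a hypothetical placeholder, not a nova.virt.vmwareapi call.

def power_off_vm(vm_ref):
    """PowerOffVM_Task: stop the guest before it can be unregistered."""

def unregister_vm(vm_ref):
    """UnregisterVM: remove the VM from the vCenter inventory, keeping its files."""

def delete_datastore_dir(datastore, path):
    """DeleteDatastoreFile_Task: remove the instance directory and its disks."""

def deallocate_network(instance_uuid):
    """Ask Neutron to delete the instance's ports (deallocate_for_instance)."""

def destroy_instance(vm_ref, datastore, instance_dir, instance_uuid):
    # Order matters: a registered or powered-on VM still holds its files,
    # so the datastore directory is only deleted after power-off and
    # unregister succeed, and network cleanup runs last.
    power_off_vm(vm_ref)
    unregister_vm(vm_ref)
    delete_datastore_dir(datastore, instance_dir)
    deallocate_network(instance_uuid)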
[ 951.093584] env[62585]: DEBUG nova.network.neutron [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Successfully created port: ad5d5eb5-2914-4674-81d4-e271a39d2b67 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 951.209438] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726be394-1c9a-4727-854c-a8e027d8eb71 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.217404] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec1279d-71e4-4127-a4dd-c9a0cac450b2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.248978] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04977c0-7d9a-42cc-a2aa-a6bc53e9c8bd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.256953] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e800d994-f990-4664-9afa-c70f3e224959 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.271311] env[62585]: DEBUG nova.compute.provider_tree [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.305327] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384993, 'name': CopyVirtualDisk_Task} progress is 85%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.576465] env[62585]: DEBUG nova.network.neutron [-] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.591370] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.774395] env[62585]: DEBUG nova.scheduler.client.report [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 951.806095] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384993, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.31177} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.806355] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/0a472987-1439-41cb-99c4-bd857ec93b02/0a472987-1439-41cb-99c4-bd857ec93b02.vmdk to [datastore1] abf4a205-fcee-46e4-85b6-10a452cc0312/abf4a205-fcee-46e4-85b6-10a452cc0312.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 951.807145] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2269b88-e760-41f7-8dee-0414d373c5b9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.830808] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] abf4a205-fcee-46e4-85b6-10a452cc0312/abf4a205-fcee-46e4-85b6-10a452cc0312.vmdk or device None with type streamOptimized {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 951.831104] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b01ef383-8034-4c92-8848-9999107684f9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.849672] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 951.849672] env[62585]: value = "task-1384997" [ 951.849672] env[62585]: _type = "Task" [ 951.849672] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.857583] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384997, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.055610] env[62585]: DEBUG nova.compute.manager [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 952.078714] env[62585]: INFO nova.compute.manager [-] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Took 1.40 seconds to deallocate network for instance. 
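The inventory reported for provider 66db9ec1-b5c3-45d2-a885-8e338110656b fixes the schedulable capacity: for each resource class, capacity is (total − reserved) × allocation_ratio, while max_unit caps what a single instance may request. The snippet below simply reproduces that arithmetic from the dict in the log; it does not call the placement API.

# Effective capacity implied by the inventory dict logged above:
# capacity = (total - reserved) * allocation_ratio, per resource class.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0, "max_unit": 176},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    # max_unit still limits how much a single instance may claim from this provider.
    print(f"{rc}: schedulable={capacity:.0f}, per-instance cap={inv['max_unit']}")
# VCPU: schedulable=192, MEMORY_MB: schedulable=196078, DISK_GB: schedulable=400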
[ 952.086771] env[62585]: DEBUG nova.virt.hardware [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 952.086771] env[62585]: DEBUG nova.virt.hardware [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 952.086771] env[62585]: DEBUG nova.virt.hardware [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 952.087056] env[62585]: DEBUG nova.virt.hardware [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 952.087236] env[62585]: DEBUG nova.virt.hardware [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 952.087387] env[62585]: DEBUG nova.virt.hardware [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 952.087674] env[62585]: DEBUG nova.virt.hardware [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 952.087869] env[62585]: DEBUG nova.virt.hardware [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 952.088062] env[62585]: DEBUG nova.virt.hardware [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 952.088234] env[62585]: DEBUG nova.virt.hardware [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 952.088413] env[62585]: DEBUG nova.virt.hardware [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 952.089374] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ab188e-f511-457a-9f0c-16368988c992 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.097929] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72fca99-d97e-4058-827d-2e4fcdfe6ee6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.123664] env[62585]: DEBUG nova.compute.manager [req-05d77e70-ebff-4429-bf04-fad94be26df1 req-d8754fa4-8780-4066-8953-6f0e1d3e9996 service nova] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Received event network-vif-deleted-5606ac00-3eff-44ad-9c80-a4014e4c2724 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 952.279972] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.280557] env[62585]: DEBUG nova.compute.manager [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 952.283140] env[62585]: DEBUG oslo_concurrency.lockutils [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.672s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.283367] env[62585]: DEBUG nova.objects.instance [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lazy-loading 'resources' on Instance uuid 4b080cc3-e1cc-4b64-9926-c37b891444f5 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 952.359295] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384997, 'name': ReconfigVM_Task, 'duration_secs': 0.287607} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.359568] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Reconfigured VM instance instance-00000046 to attach disk [datastore1] abf4a205-fcee-46e4-85b6-10a452cc0312/abf4a205-fcee-46e4-85b6-10a452cc0312.vmdk or device None with type streamOptimized {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 952.360172] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f1ec96d-0642-45ff-b27e-7d6255e1729e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.366242] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 952.366242] env[62585]: value = "task-1384998" [ 952.366242] env[62585]: _type = "Task" [ 952.366242] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.373200] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384998, 'name': Rename_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.573429] env[62585]: DEBUG nova.network.neutron [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Successfully updated port: ad5d5eb5-2914-4674-81d4-e271a39d2b67 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 952.593794] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.786308] env[62585]: DEBUG nova.compute.utils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 952.790418] env[62585]: DEBUG nova.compute.manager [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 952.790652] env[62585]: DEBUG nova.network.neutron [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 952.830651] env[62585]: DEBUG nova.policy [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28834cc42f8a49cebca5647badabf8ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c49ab537d42244f495aaa3cbdaafc6b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 952.876704] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384998, 'name': Rename_Task, 'duration_secs': 0.136496} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.877147] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 952.877251] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a93e480-ff90-44e6-8ca0-1585f0938ad2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.885123] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 952.885123] env[62585]: value = "task-1384999" [ 952.885123] env[62585]: _type = "Task" [ 952.885123] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.894187] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384999, 'name': PowerOnVM_Task} progress is 0%. 
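The Rename_Task and PowerOnVM_Task exchanges above all follow the same oslo.vmware pattern: issue the SOAP call, get back a Task managed-object reference (task-1384998, task-1384999, ...), then block on it until vCenter reports success. A minimal sketch of that pattern against oslo.vmware's public session API; the session and vm_ref are assumed to be built elsewhere (Nova constructs them from the [vmware] options in nova.conf), not taken from this log.

    from oslo_vmware import api

    def power_on(session, vm_ref):
        """Start a VM and block until vCenter reports the task finished.

        `session` is an oslo_vmware.api.VMwareAPISession; `vm_ref` is a
        VirtualMachine managed-object reference obtained elsewhere.
        """
        # invoke_api() performs the SOAP request and returns the Task reference
        # (e.g. task-1384999 above); wait_for_task() polls it, producing the
        # "progress is N%" lines, until it reaches the 'success' state.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        return session.wait_for_task(task)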
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.937352] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ade7d98-cc17-4517-b4fd-99e6d7b81eec {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.945647] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2dc8a84-7e88-4f43-8bb0-2a2b4884c12f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.978177] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b679d7bb-f1eb-4932-a572-2ecbc65703b2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.986161] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eda595b-a45d-4678-99f2-5b431e38d6a6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.999900] env[62585]: DEBUG nova.compute.provider_tree [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 953.076388] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "refresh_cache-c8f23f36-b035-467e-959a-37fc0b6462ad" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.076698] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "refresh_cache-c8f23f36-b035-467e-959a-37fc0b6462ad" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.076698] env[62585]: DEBUG nova.network.neutron [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 953.155361] env[62585]: DEBUG nova.network.neutron [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Successfully created port: fc94e060-472d-4599-81dc-e0fb45aaa2ce {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 953.291443] env[62585]: DEBUG nova.compute.manager [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 953.395239] env[62585]: DEBUG oslo_vmware.api [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1384999, 'name': PowerOnVM_Task, 'duration_secs': 0.504828} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.395552] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 953.488110] env[62585]: DEBUG nova.compute.manager [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 953.489106] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d50795b-a158-4a42-9518-1edffabe6490 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.506508] env[62585]: DEBUG nova.scheduler.client.report [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 953.607252] env[62585]: DEBUG nova.network.neutron [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Instance cache missing network info. 
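Placement treats each resource class in the inventory report above as having schedulable capacity (total - reserved) * allocation_ratio, with max_unit additionally capping what a single allocation may take. A quick check of the figures in this report, values copied from the log:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0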
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 953.745629] env[62585]: DEBUG nova.network.neutron [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Updating instance_info_cache with network_info: [{"id": "ad5d5eb5-2914-4674-81d4-e271a39d2b67", "address": "fa:16:3e:e5:13:b1", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad5d5eb5-29", "ovs_interfaceid": "ad5d5eb5-2914-4674-81d4-e271a39d2b67", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.004866] env[62585]: DEBUG oslo_concurrency.lockutils [None req-22d1024f-eb44-4201-9c06-db5fd8f0649c tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "abf4a205-fcee-46e4-85b6-10a452cc0312" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.175s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.011635] env[62585]: DEBUG oslo_concurrency.lockutils [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.728s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.013865] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.883s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.015410] env[62585]: INFO nova.compute.claims [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 954.032821] env[62585]: INFO nova.scheduler.client.report [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Deleted allocations for instance 
4b080cc3-e1cc-4b64-9926-c37b891444f5 [ 954.200293] env[62585]: DEBUG nova.compute.manager [req-a89a86e1-5b03-49c1-814c-b88923387ca9 req-f1966bbb-1283-4550-bba2-b45829ad7152 service nova] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Received event network-vif-plugged-ad5d5eb5-2914-4674-81d4-e271a39d2b67 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 954.200551] env[62585]: DEBUG oslo_concurrency.lockutils [req-a89a86e1-5b03-49c1-814c-b88923387ca9 req-f1966bbb-1283-4550-bba2-b45829ad7152 service nova] Acquiring lock "c8f23f36-b035-467e-959a-37fc0b6462ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.200799] env[62585]: DEBUG oslo_concurrency.lockutils [req-a89a86e1-5b03-49c1-814c-b88923387ca9 req-f1966bbb-1283-4550-bba2-b45829ad7152 service nova] Lock "c8f23f36-b035-467e-959a-37fc0b6462ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.200947] env[62585]: DEBUG oslo_concurrency.lockutils [req-a89a86e1-5b03-49c1-814c-b88923387ca9 req-f1966bbb-1283-4550-bba2-b45829ad7152 service nova] Lock "c8f23f36-b035-467e-959a-37fc0b6462ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.201137] env[62585]: DEBUG nova.compute.manager [req-a89a86e1-5b03-49c1-814c-b88923387ca9 req-f1966bbb-1283-4550-bba2-b45829ad7152 service nova] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] No waiting events found dispatching network-vif-plugged-ad5d5eb5-2914-4674-81d4-e271a39d2b67 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 954.201309] env[62585]: WARNING nova.compute.manager [req-a89a86e1-5b03-49c1-814c-b88923387ca9 req-f1966bbb-1283-4550-bba2-b45829ad7152 service nova] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Received unexpected event network-vif-plugged-ad5d5eb5-2914-4674-81d4-e271a39d2b67 for instance with vm_state building and task_state spawning. [ 954.201498] env[62585]: DEBUG nova.compute.manager [req-a89a86e1-5b03-49c1-814c-b88923387ca9 req-f1966bbb-1283-4550-bba2-b45829ad7152 service nova] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Received event network-changed-ad5d5eb5-2914-4674-81d4-e271a39d2b67 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 954.201670] env[62585]: DEBUG nova.compute.manager [req-a89a86e1-5b03-49c1-814c-b88923387ca9 req-f1966bbb-1283-4550-bba2-b45829ad7152 service nova] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Refreshing instance network info cache due to event network-changed-ad5d5eb5-2914-4674-81d4-e271a39d2b67. 
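The network-vif-plugged / network-changed entries above are Neutron notifying Nova through the os-server-external-events API; the "Received unexpected event ... vm_state building" warning only means that no waiter had been registered for that event yet. A sketch of the event payload Neutron posts for the port above (endpoint and token handling omitted; the UUIDs are the ones in the log):

    # POST /v2.1/os-server-external-events   (sent by Neutron's Nova notifier)
    payload = {
        "events": [{
            "name": "network-vif-plugged",
            "server_uuid": "c8f23f36-b035-467e-959a-37fc0b6462ad",
            "tag": "ad5d5eb5-2914-4674-81d4-e271a39d2b67",   # the port id
            "status": "completed",
        }]
    }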
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 954.201843] env[62585]: DEBUG oslo_concurrency.lockutils [req-a89a86e1-5b03-49c1-814c-b88923387ca9 req-f1966bbb-1283-4550-bba2-b45829ad7152 service nova] Acquiring lock "refresh_cache-c8f23f36-b035-467e-959a-37fc0b6462ad" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.251593] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "refresh_cache-c8f23f36-b035-467e-959a-37fc0b6462ad" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.252014] env[62585]: DEBUG nova.compute.manager [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Instance network_info: |[{"id": "ad5d5eb5-2914-4674-81d4-e271a39d2b67", "address": "fa:16:3e:e5:13:b1", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad5d5eb5-29", "ovs_interfaceid": "ad5d5eb5-2914-4674-81d4-e271a39d2b67", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 954.252585] env[62585]: DEBUG oslo_concurrency.lockutils [req-a89a86e1-5b03-49c1-814c-b88923387ca9 req-f1966bbb-1283-4550-bba2-b45829ad7152 service nova] Acquired lock "refresh_cache-c8f23f36-b035-467e-959a-37fc0b6462ad" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.252585] env[62585]: DEBUG nova.network.neutron [req-a89a86e1-5b03-49c1-814c-b88923387ca9 req-f1966bbb-1283-4550-bba2-b45829ad7152 service nova] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Refreshing network info cache for port ad5d5eb5-2914-4674-81d4-e271a39d2b67 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 954.254101] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:13:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40859343-2baa-45fd-88e3-ebf8aaed2b19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad5d5eb5-2914-4674-81d4-e271a39d2b67', 'vif_model': 'vmxnet3'}] {{(pid=62585) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 954.262328] env[62585]: DEBUG oslo.service.loopingcall [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.265096] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 954.265938] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2d53a74-b9b2-4d16-b22c-f67ce246bf14 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.287125] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 954.287125] env[62585]: value = "task-1385000" [ 954.287125] env[62585]: _type = "Task" [ 954.287125] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.296100] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385000, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.301426] env[62585]: DEBUG nova.compute.manager [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 954.335258] env[62585]: DEBUG nova.virt.hardware [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 954.335601] env[62585]: DEBUG nova.virt.hardware [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 954.335809] env[62585]: DEBUG nova.virt.hardware [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.336304] env[62585]: DEBUG 
nova.virt.hardware [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 954.336427] env[62585]: DEBUG nova.virt.hardware [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.336999] env[62585]: DEBUG nova.virt.hardware [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 954.337269] env[62585]: DEBUG nova.virt.hardware [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 954.337516] env[62585]: DEBUG nova.virt.hardware [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 954.337699] env[62585]: DEBUG nova.virt.hardware [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 954.337875] env[62585]: DEBUG nova.virt.hardware [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 954.339392] env[62585]: DEBUG nova.virt.hardware [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 954.339933] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028c826a-2738-4e42-8612-30c4b9e904c2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.349535] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfb1ba7-f1a8-4d8d-bbbf-419c1a45d854 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.539712] env[62585]: DEBUG oslo_concurrency.lockutils [None req-34d8246e-6d0f-46dc-b3a5-fcfa5b7bc49f tempest-ListServerFiltersTestJSON-1204357260 tempest-ListServerFiltersTestJSON-1204357260-project-member] Lock "4b080cc3-e1cc-4b64-9926-c37b891444f5" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.911s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.673954] env[62585]: DEBUG nova.network.neutron [req-a89a86e1-5b03-49c1-814c-b88923387ca9 req-f1966bbb-1283-4550-bba2-b45829ad7152 service nova] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Updated VIF entry in instance network info cache for port ad5d5eb5-2914-4674-81d4-e271a39d2b67. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 954.674755] env[62585]: DEBUG nova.network.neutron [req-a89a86e1-5b03-49c1-814c-b88923387ca9 req-f1966bbb-1283-4550-bba2-b45829ad7152 service nova] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Updating instance_info_cache with network_info: [{"id": "ad5d5eb5-2914-4674-81d4-e271a39d2b67", "address": "fa:16:3e:e5:13:b1", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad5d5eb5-29", "ovs_interfaceid": "ad5d5eb5-2914-4674-81d4-e271a39d2b67", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.796911] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385000, 'name': CreateVM_Task, 'duration_secs': 0.326544} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.799097] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 954.799097] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.799097] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.799097] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 954.799097] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afc8c05d-dcd2-4f0e-b8c6-954ac68d2efa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.803439] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 954.803439] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522c844d-cc4d-74ee-5d72-19f6b71a9cde" [ 954.803439] env[62585]: _type = "Task" [ 954.803439] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.811178] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522c844d-cc4d-74ee-5d72-19f6b71a9cde, 'name': SearchDatastore_Task} progress is 0%. 
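The Acquiring / Acquired / Releasing lock lines around "compute_resources", the refresh_cache-<uuid> locks and the "[datastore1] devstack-image-cache_base" entries all come from oslo.concurrency's lockutils, used either as a context manager or as a decorator. A minimal sketch of both forms; the function bodies and lock names here are illustrative, not Nova's code.

    from oslo_concurrency import lockutils

    # Context-manager form: emits the 'Acquiring lock "..."' / 'Acquired lock'
    # debug lines on entry and the '"released" ... held N.NNNs' line on exit.
    def refresh_cache(instance_uuid):
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            pass  # rebuild the instance's network info cache here

    # Decorator form: serialises every call to the wrapped function on one lock,
    # which is where the long 'waited 5.672s' / 'held 2.249s' figures come from
    # when several builds contend for "compute_resources".
    @lockutils.synchronized("compute_resources")
    def update_usage(instance):
        pass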
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.846627] env[62585]: DEBUG nova.network.neutron [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Successfully updated port: fc94e060-472d-4599-81dc-e0fb45aaa2ce {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 955.147224] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc5b9ce-abc7-494e-b2ce-b68ccd0f9bb2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.155195] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff3e422-5b23-4488-99eb-fc31b681d798 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.185343] env[62585]: DEBUG oslo_concurrency.lockutils [req-a89a86e1-5b03-49c1-814c-b88923387ca9 req-f1966bbb-1283-4550-bba2-b45829ad7152 service nova] Releasing lock "refresh_cache-c8f23f36-b035-467e-959a-37fc0b6462ad" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.185869] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 955.186281] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 955.188070] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb64371-606a-48ca-8e39-b6ccabb20987 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.200295] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839dd1ca-2874-4a81-b06d-22858a9041ac {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.215109] env[62585]: DEBUG nova.compute.provider_tree [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.313698] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522c844d-cc4d-74ee-5d72-19f6b71a9cde, 'name': SearchDatastore_Task, 'duration_secs': 0.010374} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.314018] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.314261] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 955.314528] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.314684] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.314861] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 955.315141] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c980583a-825b-4e28-99bc-b5e7c7a5e61b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.323110] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 955.323296] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 955.323997] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a35c59c-27b4-4467-8c17-3594ed6eb7e7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.329113] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 955.329113] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52bb68b6-b633-15e0-7f94-35c1d3a9aa6b" [ 955.329113] env[62585]: _type = "Task" [ 955.329113] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.336137] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52bb68b6-b633-15e0-7f94-35c1d3a9aa6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.348923] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "refresh_cache-a26fb190-e6e6-48ab-a1d6-c662421a965f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.349072] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "refresh_cache-a26fb190-e6e6-48ab-a1d6-c662421a965f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.349223] env[62585]: DEBUG nova.network.neutron [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 955.490744] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a6318c-7f5a-474b-900b-ea1cedb98e48 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.497554] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-460a41e2-1af4-4c38-9216-5f5ea444257a tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Suspending the VM {{(pid=62585) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 955.497799] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-61babf53-d2d9-4c8e-bde2-8c0c7a9540d2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.504651] env[62585]: DEBUG oslo_vmware.api [None req-460a41e2-1af4-4c38-9216-5f5ea444257a tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 
955.504651] env[62585]: value = "task-1385001" [ 955.504651] env[62585]: _type = "Task" [ 955.504651] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.512015] env[62585]: DEBUG oslo_vmware.api [None req-460a41e2-1af4-4c38-9216-5f5ea444257a tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1385001, 'name': SuspendVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.697261] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 955.697261] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Starting heal instance info cache {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 955.720022] env[62585]: DEBUG nova.scheduler.client.report [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 955.841259] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52bb68b6-b633-15e0-7f94-35c1d3a9aa6b, 'name': SearchDatastore_Task, 'duration_secs': 0.007864} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.842269] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bbd4408-22ff-4f62-98b0-5c3d6e5273bc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.847362] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 955.847362] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52eb9531-92f4-daf7-2855-199b2d17a491" [ 955.847362] env[62585]: _type = "Task" [ 955.847362] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.856929] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52eb9531-92f4-daf7-2855-199b2d17a491, 'name': SearchDatastore_Task} progress is 0%. 
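The "progress is N%" entries are produced by wait_for_task()'s poller (_poll_task in oslo_vmware/api.py), which runs on a fixed-interval looping call until the vCenter task leaves the running state. A rough sketch of that polling shape with oslo.service; the real _poll_task also inspects TaskInfo.state and raises on 'error', and check_done here is a hypothetical stand-in for reading TaskInfo.

    from oslo_service import loopingcall

    def _poll(check_done):
        # check_done() stands in for fetching the task's TaskInfo via the vSphere API.
        if check_done():
            raise loopingcall.LoopingCallDone()

    def wait(check_done, interval=0.5):
        timer = loopingcall.FixedIntervalLoopingCall(_poll, check_done)
        try:
            # start() returns an event that completes once LoopingCallDone is raised.
            timer.start(interval=interval).wait()
        finally:
            timer.stop()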
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.885161] env[62585]: DEBUG nova.network.neutron [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 956.015842] env[62585]: DEBUG oslo_vmware.api [None req-460a41e2-1af4-4c38-9216-5f5ea444257a tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1385001, 'name': SuspendVM_Task} progress is 70%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.035813] env[62585]: DEBUG nova.network.neutron [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Updating instance_info_cache with network_info: [{"id": "fc94e060-472d-4599-81dc-e0fb45aaa2ce", "address": "fa:16:3e:7c:73:5e", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc94e060-47", "ovs_interfaceid": "fc94e060-472d-4599-81dc-e0fb45aaa2ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.224914] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.211s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.225479] env[62585]: DEBUG nova.compute.manager [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Start building networks asynchronously for instance. 
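The instance_info_cache entries above carry the full per-VIF network model as JSON; later stages read the MAC, fixed IP and NSX logical-switch id out of that structure. A small helper showing how the fixed IPs in such an entry would be read, using plain dict access over the structure exactly as logged:

    def fixed_ips(network_info):
        # network_info is the list of VIF dicts shown in the cache update above.
        ips = []
        for vif in network_info:
            for subnet in vif["network"]["subnets"]:
                ips.extend(ip["address"] for ip in subnet["ips"])
        return ips

    # For the a26fb190-e6e6-48ab-a1d6-c662421a965f entry above this returns
    # ["192.168.128.12"].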
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 956.229988] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.207s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.234671] env[62585]: INFO nova.compute.claims [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 956.238653] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.238653] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquired lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.238653] env[62585]: DEBUG nova.network.neutron [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Forcefully refreshing network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 956.249318] env[62585]: DEBUG nova.compute.manager [req-8b091326-1aa6-402e-b700-dd326c00da91 req-41beaa35-1d3f-4b9d-9a20-22773899b8a2 service nova] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Received event network-vif-plugged-fc94e060-472d-4599-81dc-e0fb45aaa2ce {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 956.250025] env[62585]: DEBUG oslo_concurrency.lockutils [req-8b091326-1aa6-402e-b700-dd326c00da91 req-41beaa35-1d3f-4b9d-9a20-22773899b8a2 service nova] Acquiring lock "a26fb190-e6e6-48ab-a1d6-c662421a965f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.250025] env[62585]: DEBUG oslo_concurrency.lockutils [req-8b091326-1aa6-402e-b700-dd326c00da91 req-41beaa35-1d3f-4b9d-9a20-22773899b8a2 service nova] Lock "a26fb190-e6e6-48ab-a1d6-c662421a965f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.250025] env[62585]: DEBUG oslo_concurrency.lockutils [req-8b091326-1aa6-402e-b700-dd326c00da91 req-41beaa35-1d3f-4b9d-9a20-22773899b8a2 service nova] Lock "a26fb190-e6e6-48ab-a1d6-c662421a965f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.250368] env[62585]: DEBUG nova.compute.manager [req-8b091326-1aa6-402e-b700-dd326c00da91 req-41beaa35-1d3f-4b9d-9a20-22773899b8a2 service nova] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] No waiting events found dispatching network-vif-plugged-fc94e060-472d-4599-81dc-e0fb45aaa2ce {{(pid=62585) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 956.250368] env[62585]: WARNING nova.compute.manager [req-8b091326-1aa6-402e-b700-dd326c00da91 req-41beaa35-1d3f-4b9d-9a20-22773899b8a2 service nova] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Received unexpected event network-vif-plugged-fc94e060-472d-4599-81dc-e0fb45aaa2ce for instance with vm_state building and task_state spawning. [ 956.250441] env[62585]: DEBUG nova.compute.manager [req-8b091326-1aa6-402e-b700-dd326c00da91 req-41beaa35-1d3f-4b9d-9a20-22773899b8a2 service nova] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Received event network-changed-fc94e060-472d-4599-81dc-e0fb45aaa2ce {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 956.250677] env[62585]: DEBUG nova.compute.manager [req-8b091326-1aa6-402e-b700-dd326c00da91 req-41beaa35-1d3f-4b9d-9a20-22773899b8a2 service nova] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Refreshing instance network info cache due to event network-changed-fc94e060-472d-4599-81dc-e0fb45aaa2ce. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 956.250781] env[62585]: DEBUG oslo_concurrency.lockutils [req-8b091326-1aa6-402e-b700-dd326c00da91 req-41beaa35-1d3f-4b9d-9a20-22773899b8a2 service nova] Acquiring lock "refresh_cache-a26fb190-e6e6-48ab-a1d6-c662421a965f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.359351] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52eb9531-92f4-daf7-2855-199b2d17a491, 'name': SearchDatastore_Task, 'duration_secs': 0.010053} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.359625] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.359883] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] c8f23f36-b035-467e-959a-37fc0b6462ad/c8f23f36-b035-467e-959a-37fc0b6462ad.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 956.360160] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea834998-2fb6-4328-b4ff-85b187c765ac {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.367210] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 956.367210] env[62585]: value = "task-1385002" [ 956.367210] env[62585]: _type = "Task" [ 956.367210] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.374851] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385002, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.520434] env[62585]: DEBUG oslo_vmware.api [None req-460a41e2-1af4-4c38-9216-5f5ea444257a tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1385001, 'name': SuspendVM_Task, 'duration_secs': 0.566341} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.520434] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-460a41e2-1af4-4c38-9216-5f5ea444257a tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Suspended the VM {{(pid=62585) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 956.520434] env[62585]: DEBUG nova.compute.manager [None req-460a41e2-1af4-4c38-9216-5f5ea444257a tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 956.520434] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7365d9-5649-4ae1-9a2c-724a0fcbbbff {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.538651] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "refresh_cache-a26fb190-e6e6-48ab-a1d6-c662421a965f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.538999] env[62585]: DEBUG nova.compute.manager [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Instance network_info: |[{"id": "fc94e060-472d-4599-81dc-e0fb45aaa2ce", "address": "fa:16:3e:7c:73:5e", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc94e060-47", "ovs_interfaceid": "fc94e060-472d-4599-81dc-e0fb45aaa2ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 956.539362] env[62585]: DEBUG oslo_concurrency.lockutils [req-8b091326-1aa6-402e-b700-dd326c00da91 req-41beaa35-1d3f-4b9d-9a20-22773899b8a2 service nova] Acquired lock "refresh_cache-a26fb190-e6e6-48ab-a1d6-c662421a965f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.539557] env[62585]: DEBUG nova.network.neutron [req-8b091326-1aa6-402e-b700-dd326c00da91 req-41beaa35-1d3f-4b9d-9a20-22773899b8a2 service nova] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Refreshing network info cache for port fc94e060-472d-4599-81dc-e0fb45aaa2ce {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 956.540901] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:73:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc94e060-472d-4599-81dc-e0fb45aaa2ce', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 956.550031] env[62585]: DEBUG oslo.service.loopingcall [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 956.550152] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 956.550374] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bca9acf4-6d6f-483f-bc06-14b094d5cfed {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.572396] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 956.572396] env[62585]: value = "task-1385003" [ 956.572396] env[62585]: _type = "Task" [ 956.572396] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.581627] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385003, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.741545] env[62585]: DEBUG nova.compute.utils [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 956.746546] env[62585]: DEBUG nova.compute.manager [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Not allocating networking since 'none' was specified. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 956.879346] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385002, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.437358} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.879664] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] c8f23f36-b035-467e-959a-37fc0b6462ad/c8f23f36-b035-467e-959a-37fc0b6462ad.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 956.879881] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 956.880162] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2e9fc9c1-09a1-40be-bffe-53eb328400ca {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.893623] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 956.893623] env[62585]: value = "task-1385004" [ 956.893623] env[62585]: _type = "Task" [ 956.893623] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.902722] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385004, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.084028] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385003, 'name': CreateVM_Task, 'duration_secs': 0.411842} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.084028] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 957.084028] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.084247] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.084453] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 957.084739] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b4abe9e-bd29-43c0-b1c0-8c07e447bba3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.090022] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 957.090022] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b80daf-4b5c-3c3f-26b4-dfa853ec015a" [ 957.090022] env[62585]: _type = "Task" [ 957.090022] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.097155] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b80daf-4b5c-3c3f-26b4-dfa853ec015a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.247384] env[62585]: DEBUG nova.compute.manager [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 957.258523] env[62585]: DEBUG nova.network.neutron [req-8b091326-1aa6-402e-b700-dd326c00da91 req-41beaa35-1d3f-4b9d-9a20-22773899b8a2 service nova] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Updated VIF entry in instance network info cache for port fc94e060-472d-4599-81dc-e0fb45aaa2ce. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 957.258904] env[62585]: DEBUG nova.network.neutron [req-8b091326-1aa6-402e-b700-dd326c00da91 req-41beaa35-1d3f-4b9d-9a20-22773899b8a2 service nova] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Updating instance_info_cache with network_info: [{"id": "fc94e060-472d-4599-81dc-e0fb45aaa2ce", "address": "fa:16:3e:7c:73:5e", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc94e060-47", "ovs_interfaceid": "fc94e060-472d-4599-81dc-e0fb45aaa2ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.400706] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00071d2c-5c9c-42ef-9445-8abfbe0f8535 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.408717] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385004, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092104} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.412118] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 957.414314] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664cc277-3c6a-48a9-94e2-8c0138a19fc6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.418891] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fda99a-d163-4f7a-8da8-80064919b0f7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.441381] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] c8f23f36-b035-467e-959a-37fc0b6462ad/c8f23f36-b035-467e-959a-37fc0b6462ad.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 957.470020] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cdec2102-c913-4132-9247-fb2c65110b6b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.487194] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b619c7b-0322-4fb4-b938-c4a61e31f8bb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.497053] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a4f804-80a3-4bb3-a17e-1bfd1695b5fe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.500507] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 957.500507] env[62585]: value = "task-1385005" [ 957.500507] env[62585]: _type = "Task" [ 957.500507] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.512444] env[62585]: DEBUG nova.compute.provider_tree [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.519712] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385005, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.603435] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b80daf-4b5c-3c3f-26b4-dfa853ec015a, 'name': SearchDatastore_Task, 'duration_secs': 0.048169} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.604111] env[62585]: DEBUG nova.network.neutron [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updating instance_info_cache with network_info: [{"id": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "address": "fa:16:3e:31:3b:9c", "network": {"id": "8a8daef6-7b2d-44f6-8f2a-5cdf4dfff449", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-240024676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9645866ca8f0433cae30cf5867244ca8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc3d19ab-ba", "ovs_interfaceid": "bc3d19ab-ba98-4935-9e08-61c5df21be43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.606856] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.607142] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 957.607390] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.607540] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 
tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.607723] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 957.608008] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64c2ffad-7833-4cf8-93f9-acc500b8e404 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.617583] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 957.617773] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 957.618681] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7264198-c90e-4a3b-8f44-bc5a8c87d41e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.624984] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 957.624984] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52fa807c-4a32-800b-c1d5-5b4ddcca11df" [ 957.624984] env[62585]: _type = "Task" [ 957.624984] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.637266] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52fa807c-4a32-800b-c1d5-5b4ddcca11df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.762983] env[62585]: DEBUG oslo_concurrency.lockutils [req-8b091326-1aa6-402e-b700-dd326c00da91 req-41beaa35-1d3f-4b9d-9a20-22773899b8a2 service nova] Releasing lock "refresh_cache-a26fb190-e6e6-48ab-a1d6-c662421a965f" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.010129] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385005, 'name': ReconfigVM_Task, 'duration_secs': 0.340018} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.010402] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Reconfigured VM instance instance-0000005c to attach disk [datastore1] c8f23f36-b035-467e-959a-37fc0b6462ad/c8f23f36-b035-467e-959a-37fc0b6462ad.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 958.011082] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1bb81b42-274b-4dd4-bc16-6f30fafeae6d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.014526] env[62585]: DEBUG nova.scheduler.client.report [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 958.018641] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 958.018641] env[62585]: value = "task-1385006" [ 958.018641] env[62585]: _type = "Task" [ 958.018641] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.027779] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385006, 'name': Rename_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.070477] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "b7b8338a-2e9f-4854-8f4d-ede21b150317" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.070799] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "b7b8338a-2e9f-4854-8f4d-ede21b150317" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.107528] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Releasing lock "refresh_cache-6057e13b-71df-458d-b6ed-c139a8c57836" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.107734] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updated the network info_cache for instance {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 958.107920] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 958.108198] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 958.108394] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 958.108613] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 958.108824] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 958.108975] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 958.109119] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62585) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 958.109305] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 958.135476] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52fa807c-4a32-800b-c1d5-5b4ddcca11df, 'name': SearchDatastore_Task, 'duration_secs': 0.018286} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.136496] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a8e501c-3203-4341-888f-437d5d28ede8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.141706] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 958.141706] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526ac4c5-d3c0-1079-19a2-f45e1e52ba44" [ 958.141706] env[62585]: _type = "Task" [ 958.141706] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.152481] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526ac4c5-d3c0-1079-19a2-f45e1e52ba44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.228218] env[62585]: INFO nova.compute.manager [None req-7f6af7f0-6208-42a0-b681-6fb466097921 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Resuming [ 958.230403] env[62585]: DEBUG nova.objects.instance [None req-7f6af7f0-6208-42a0-b681-6fb466097921 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lazy-loading 'flavor' on Instance uuid abf4a205-fcee-46e4-85b6-10a452cc0312 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.261584] env[62585]: DEBUG nova.compute.manager [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 958.290677] env[62585]: DEBUG nova.virt.hardware [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 958.291086] env[62585]: DEBUG nova.virt.hardware [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 958.291285] env[62585]: DEBUG nova.virt.hardware [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 958.291368] env[62585]: DEBUG nova.virt.hardware [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 958.291570] env[62585]: DEBUG nova.virt.hardware [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 958.292424] env[62585]: DEBUG nova.virt.hardware [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 958.292424] env[62585]: DEBUG nova.virt.hardware [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 958.292424] env[62585]: DEBUG nova.virt.hardware [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 958.292768] env[62585]: DEBUG nova.virt.hardware [None 
req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 958.293250] env[62585]: DEBUG nova.virt.hardware [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 958.293514] env[62585]: DEBUG nova.virt.hardware [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 958.294558] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0296940b-7e9a-4ce6-b9ca-69a1935e5311 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.303419] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90673cae-d6bd-4b36-b0b9-ed4ba06b1bf5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.321683] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Instance VIF info [] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 958.332061] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Creating folder: Project (6d433beec2514d71b5c4f5fb6c08a1a7). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 958.333733] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38cfc829-6e4f-443c-9713-f102f20f023a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.345816] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Created folder: Project (6d433beec2514d71b5c4f5fb6c08a1a7) in parent group-v293962. [ 958.346045] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Creating folder: Instances. Parent ref: group-v294055. 
{{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 958.346307] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e57d06e-98f1-41d5-9588-803ad641ecd9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.355590] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Created folder: Instances in parent group-v294055. [ 958.355833] env[62585]: DEBUG oslo.service.loopingcall [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.356039] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 958.356253] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d40b2c64-cd7d-4be3-beb7-2b02122e5947 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.376079] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 958.376079] env[62585]: value = "task-1385009" [ 958.376079] env[62585]: _type = "Task" [ 958.376079] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.384383] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385009, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.523685] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.294s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.524274] env[62585]: DEBUG nova.compute.manager [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 958.530869] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.936s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.530869] env[62585]: DEBUG nova.objects.instance [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lazy-loading 'resources' on Instance uuid a634a80e-d90a-4ce3-8233-75657a7754be {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.533961] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385006, 'name': Rename_Task, 'duration_secs': 0.137496} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.534869] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 958.534869] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1f5fd73-4c02-40e2-a883-443108c41d2f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.541571] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 958.541571] env[62585]: value = "task-1385010" [ 958.541571] env[62585]: _type = "Task" [ 958.541571] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.549915] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385010, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.574526] env[62585]: DEBUG nova.compute.manager [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 958.612263] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.651505] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526ac4c5-d3c0-1079-19a2-f45e1e52ba44, 'name': SearchDatastore_Task, 'duration_secs': 0.010703} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.651697] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.651964] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] a26fb190-e6e6-48ab-a1d6-c662421a965f/a26fb190-e6e6-48ab-a1d6-c662421a965f.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 958.652242] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-424c8ef5-6303-4ce8-9bdc-dbf2725f1a68 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.658685] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 958.658685] env[62585]: value = "task-1385011" [ 958.658685] env[62585]: _type = "Task" [ 958.658685] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.668458] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385011, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.889353] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385009, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.030972] env[62585]: DEBUG nova.compute.utils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 959.032606] env[62585]: DEBUG nova.compute.manager [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 959.032790] env[62585]: DEBUG nova.network.neutron [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 959.054580] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385010, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.079997] env[62585]: DEBUG nova.policy [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01befe1db3684d60943c74da2c2c9fdc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f00751679b29472e9ab92c9e48a99925', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 959.102181] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.172819] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385011, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489338} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.172819] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] a26fb190-e6e6-48ab-a1d6-c662421a965f/a26fb190-e6e6-48ab-a1d6-c662421a965f.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 959.172819] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 959.172819] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0133fd92-7d2c-4974-9e19-5f3055b375f9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.179898] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 959.179898] env[62585]: value = "task-1385012" [ 959.179898] env[62585]: _type = "Task" [ 959.179898] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.188366] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385012, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.190193] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11dd9d9a-fba7-4f42-897c-4706581c11b2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.197413] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4888baf0-3756-4eb6-a5a5-47b928db78e9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.228584] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f162ecba-8b5d-4e3a-88a4-a6edbe8d2b62 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.236476] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7f6af7f0-6208-42a0-b681-6fb466097921 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.236615] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7f6af7f0-6208-42a0-b681-6fb466097921 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquired lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.236822] env[62585]: DEBUG nova.network.neutron [None req-7f6af7f0-6208-42a0-b681-6fb466097921 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 959.241271] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92709ea9-3a60-4815-b5cb-48069509816f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.256128] env[62585]: DEBUG nova.compute.provider_tree [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.361624] env[62585]: DEBUG nova.network.neutron [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Successfully created port: 68e2a061-2351-4cbb-bc96-6898b8dac94e {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 959.387303] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385009, 'name': CreateVM_Task, 'duration_secs': 0.645793} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.387469] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 959.387887] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.388068] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.388384] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 959.388659] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91f61f93-4652-4355-ad73-c52ee2a191d2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.392883] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Waiting for the task: (returnval){ [ 959.392883] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52742f05-1e43-eca1-6fd2-4b8746fedac4" [ 959.392883] env[62585]: _type = "Task" [ 959.392883] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.400206] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52742f05-1e43-eca1-6fd2-4b8746fedac4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.535623] env[62585]: DEBUG nova.compute.manager [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 959.552566] env[62585]: DEBUG oslo_vmware.api [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385010, 'name': PowerOnVM_Task, 'duration_secs': 0.525091} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.552834] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 959.553137] env[62585]: INFO nova.compute.manager [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Took 7.50 seconds to spawn the instance on the hypervisor. [ 959.553350] env[62585]: DEBUG nova.compute.manager [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 959.555178] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38ff531-0189-4180-961f-ab56cb85316a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.689593] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385012, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070078} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.690497] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 959.690876] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1dd218-093a-4420-85e9-4a37e361621f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.712026] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] a26fb190-e6e6-48ab-a1d6-c662421a965f/a26fb190-e6e6-48ab-a1d6-c662421a965f.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.712269] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-168e3dea-9ba4-4572-a138-7df1aaee075d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.731834] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 959.731834] env[62585]: value = "task-1385013" [ 959.731834] env[62585]: _type = "Task" [ 959.731834] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.739353] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385013, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.759548] env[62585]: DEBUG nova.scheduler.client.report [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 959.904041] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52742f05-1e43-eca1-6fd2-4b8746fedac4, 'name': SearchDatastore_Task, 'duration_secs': 0.0093} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.904041] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.904454] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 959.904454] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.904569] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.904680] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 
tempest-ServersAaction247Test-107675771-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 959.904932] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e711ff35-01c9-49ad-b959-47407500417d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.919954] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 959.920193] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 959.920948] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-478e575d-e08b-4c65-8ca9-9cc1c2595008 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.929787] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Waiting for the task: (returnval){ [ 959.929787] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52898fee-0c4d-e57d-1a24-b194db0e083c" [ 959.929787] env[62585]: _type = "Task" [ 959.929787] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.937802] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52898fee-0c4d-e57d-1a24-b194db0e083c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.077552] env[62585]: INFO nova.compute.manager [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Took 18.69 seconds to build instance. [ 960.242398] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385013, 'name': ReconfigVM_Task, 'duration_secs': 0.269611} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.242678] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Reconfigured VM instance instance-0000005d to attach disk [datastore1] a26fb190-e6e6-48ab-a1d6-c662421a965f/a26fb190-e6e6-48ab-a1d6-c662421a965f.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.243521] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04693824-1e9e-4ab2-a686-8f9bd8241832 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.249954] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 960.249954] env[62585]: value = "task-1385014" [ 960.249954] env[62585]: _type = "Task" [ 960.249954] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.254434] env[62585]: DEBUG nova.network.neutron [None req-7f6af7f0-6208-42a0-b681-6fb466097921 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Updating instance_info_cache with network_info: [{"id": "c32c8966-edf9-44a6-9263-00c85e124ab0", "address": "fa:16:3e:9d:f5:11", "network": {"id": "2b85c6b0-fc8f-4275-94c8-9262d8ea21cd", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-609771769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34c6f21d288e47dd94ccbe12526fe4e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc32c8966-ed", "ovs_interfaceid": "c32c8966-edf9-44a6-9263-00c85e124ab0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.259265] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385014, 'name': Rename_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.264559] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.737s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.267579] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.674s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.267808] env[62585]: DEBUG nova.objects.instance [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lazy-loading 'resources' on Instance uuid 891e5a42-3681-47eb-ac88-015fa21a6580 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 960.287551] env[62585]: INFO nova.scheduler.client.report [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Deleted allocations for instance a634a80e-d90a-4ce3-8233-75657a7754be [ 960.445686] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52898fee-0c4d-e57d-1a24-b194db0e083c, 'name': SearchDatastore_Task, 'duration_secs': 0.011361} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.447125] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e832c163-c502-4432-bb70-8da1d35c6079 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.454941] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Waiting for the task: (returnval){ [ 960.454941] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a5be92-0c1f-ab83-b01a-02744e284830" [ 960.454941] env[62585]: _type = "Task" [ 960.454941] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.467958] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a5be92-0c1f-ab83-b01a-02744e284830, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.545478] env[62585]: DEBUG nova.compute.manager [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 960.571053] env[62585]: DEBUG nova.virt.hardware [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 960.571313] env[62585]: DEBUG nova.virt.hardware [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 960.571473] env[62585]: DEBUG nova.virt.hardware [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 960.571660] env[62585]: DEBUG nova.virt.hardware [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 960.571811] env[62585]: DEBUG nova.virt.hardware [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 960.571959] env[62585]: DEBUG nova.virt.hardware [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 960.572682] env[62585]: DEBUG nova.virt.hardware [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 960.572878] env[62585]: DEBUG nova.virt.hardware [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 960.573102] env[62585]: DEBUG nova.virt.hardware [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] 
Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 960.573283] env[62585]: DEBUG nova.virt.hardware [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 960.573463] env[62585]: DEBUG nova.virt.hardware [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 960.574344] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332d9e3b-fe87-4124-b565-87f350415260 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.582802] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d1c7373b-85b1-466b-8380-f6f5bcee929b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "c8f23f36-b035-467e-959a-37fc0b6462ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.206s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.587806] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e23759-06a5-4cf0-b478-7bc77b2eec78 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.760551] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7f6af7f0-6208-42a0-b681-6fb466097921 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Releasing lock "refresh_cache-abf4a205-fcee-46e4-85b6-10a452cc0312" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.761046] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385014, 'name': Rename_Task} progress is 99%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.761807] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee9d5e4-91bb-4577-8f82-82d9ef9a4c14 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.767754] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7f6af7f0-6208-42a0-b681-6fb466097921 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Resuming the VM {{(pid=62585) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 960.768028] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-99554290-4b02-42fb-85f9-bdcec4a08e49 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.776895] env[62585]: DEBUG oslo_vmware.api [None req-7f6af7f0-6208-42a0-b681-6fb466097921 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 960.776895] env[62585]: value = "task-1385015" [ 960.776895] env[62585]: _type = "Task" [ 960.776895] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.787239] env[62585]: DEBUG oslo_vmware.api [None req-7f6af7f0-6208-42a0-b681-6fb466097921 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1385015, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.795884] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b6928cc6-3737-4edb-9b03-de3af777f300 tempest-AttachInterfacesTestJSON-1857705027 tempest-AttachInterfacesTestJSON-1857705027-project-member] Lock "a634a80e-d90a-4ce3-8233-75657a7754be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.134s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.820592] env[62585]: DEBUG nova.compute.manager [req-acdf365f-5bb7-4ee9-a488-cb99bcfd993a req-09a47d2c-9af0-4315-ae41-318791668d93 service nova] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Received event network-vif-plugged-68e2a061-2351-4cbb-bc96-6898b8dac94e {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 960.820829] env[62585]: DEBUG oslo_concurrency.lockutils [req-acdf365f-5bb7-4ee9-a488-cb99bcfd993a req-09a47d2c-9af0-4315-ae41-318791668d93 service nova] Acquiring lock "b0885bdd-bc8d-4311-8388-54bdc22144c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.821064] env[62585]: DEBUG oslo_concurrency.lockutils [req-acdf365f-5bb7-4ee9-a488-cb99bcfd993a req-09a47d2c-9af0-4315-ae41-318791668d93 service nova] Lock "b0885bdd-bc8d-4311-8388-54bdc22144c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.821241] env[62585]: DEBUG oslo_concurrency.lockutils [req-acdf365f-5bb7-4ee9-a488-cb99bcfd993a 
req-09a47d2c-9af0-4315-ae41-318791668d93 service nova] Lock "b0885bdd-bc8d-4311-8388-54bdc22144c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.821413] env[62585]: DEBUG nova.compute.manager [req-acdf365f-5bb7-4ee9-a488-cb99bcfd993a req-09a47d2c-9af0-4315-ae41-318791668d93 service nova] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] No waiting events found dispatching network-vif-plugged-68e2a061-2351-4cbb-bc96-6898b8dac94e {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 960.821583] env[62585]: WARNING nova.compute.manager [req-acdf365f-5bb7-4ee9-a488-cb99bcfd993a req-09a47d2c-9af0-4315-ae41-318791668d93 service nova] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Received unexpected event network-vif-plugged-68e2a061-2351-4cbb-bc96-6898b8dac94e for instance with vm_state building and task_state spawning. [ 960.922136] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ca04d7-8d19-4595-a505-4021be542665 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.924762] env[62585]: DEBUG nova.network.neutron [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Successfully updated port: 68e2a061-2351-4cbb-bc96-6898b8dac94e {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 960.932114] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba87064-2df2-482b-b6ee-e04a66cb8951 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.967412] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780ae20e-d5f9-48ef-9cea-4ce784e7484e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.975704] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a5be92-0c1f-ab83-b01a-02744e284830, 'name': SearchDatastore_Task, 'duration_secs': 0.01941} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.977950] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.978270] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 65ed4088-2cc5-4c00-94af-f714ec608fd8/65ed4088-2cc5-4c00-94af-f714ec608fd8.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 960.978919] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9faffe3c-ab55-486f-8a50-ce5051b14a0d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.981843] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615c4970-abef-43d0-87f4-f8bfc653db53 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.995832] env[62585]: DEBUG nova.compute.provider_tree [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 960.998476] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Waiting for the task: (returnval){ [ 960.998476] env[62585]: value = "task-1385016" [ 960.998476] env[62585]: _type = "Task" [ 960.998476] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.006851] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385016, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.083222] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "c8f23f36-b035-467e-959a-37fc0b6462ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.083482] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "c8f23f36-b035-467e-959a-37fc0b6462ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.083703] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "c8f23f36-b035-467e-959a-37fc0b6462ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.083892] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "c8f23f36-b035-467e-959a-37fc0b6462ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.084079] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "c8f23f36-b035-467e-959a-37fc0b6462ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.086326] env[62585]: INFO nova.compute.manager [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Terminating instance [ 961.088112] env[62585]: DEBUG nova.compute.manager [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 961.088310] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 961.089202] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6da271-7d17-4464-af30-27784045eccd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.098324] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 961.098602] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b57bacf7-3eac-422a-a451-ba63fbc325a1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.104584] env[62585]: DEBUG oslo_vmware.api [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 961.104584] env[62585]: value = "task-1385017" [ 961.104584] env[62585]: _type = "Task" [ 961.104584] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.113072] env[62585]: DEBUG oslo_vmware.api [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385017, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.263456] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385014, 'name': Rename_Task} progress is 99%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.290148] env[62585]: DEBUG oslo_vmware.api [None req-7f6af7f0-6208-42a0-b681-6fb466097921 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1385015, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.427627] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "refresh_cache-b0885bdd-bc8d-4311-8388-54bdc22144c2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.427824] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired lock "refresh_cache-b0885bdd-bc8d-4311-8388-54bdc22144c2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.428029] env[62585]: DEBUG nova.network.neutron [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 961.500642] env[62585]: DEBUG nova.scheduler.client.report [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 961.522289] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385016, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.615084] env[62585]: DEBUG oslo_vmware.api [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385017, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.761517] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385014, 'name': Rename_Task, 'duration_secs': 1.380597} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.761952] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 961.762345] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9fe99a5f-1815-48e0-a09f-95c3ee5ffce0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.768890] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 961.768890] env[62585]: value = "task-1385018" [ 961.768890] env[62585]: _type = "Task" [ 961.768890] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.776953] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385018, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.786900] env[62585]: DEBUG oslo_vmware.api [None req-7f6af7f0-6208-42a0-b681-6fb466097921 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1385015, 'name': PowerOnVM_Task, 'duration_secs': 0.61753} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.787190] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7f6af7f0-6208-42a0-b681-6fb466097921 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Resumed the VM {{(pid=62585) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 961.787374] env[62585]: DEBUG nova.compute.manager [None req-7f6af7f0-6208-42a0-b681-6fb466097921 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 961.788167] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac331a4-e518-4a45-a583-773432474baf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.963357] env[62585]: DEBUG nova.network.neutron [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 962.016566] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.749s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.019319] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385016, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602642} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.019319] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.407s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.019475] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.019569] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62585) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 962.020403] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.918s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.021756] env[62585]: INFO nova.compute.claims [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 962.024315] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 65ed4088-2cc5-4c00-94af-f714ec608fd8/65ed4088-2cc5-4c00-94af-f714ec608fd8.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 962.024641] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} 
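Nearly every operation in the surrounding entries follows one pattern: the VMware driver invokes a vCenter task ("Invoking CopyVirtualDisk_Task", "ExtendVirtualDisk_Task", "ReconfigVM_Task", "PowerOnVM_Task", ...), then blocks in wait_for_task while _poll_task logs "progress is N%" until the task reports completion. A minimal sketch of that submit-then-poll loop is shown below; the names used here (wait_for_vcenter_task, FakeTaskInfo, TaskFailed) are hypothetical illustrations of the behaviour visible in these DEBUG lines, not the oslo.vmware implementation.

```python
# Minimal sketch of the submit-then-poll pattern behind the
# "Invoking <X>_Task ..." / "Task: {...} progress is N% ..." /
# "... completed successfully" triplets in this log.
# wait_for_vcenter_task, FakeTaskInfo and TaskFailed are hypothetical names.
import time


class TaskFailed(Exception):
    """Raised when a vCenter-style task finishes in the error state."""


def wait_for_vcenter_task(get_task_info, poll_interval=0.5, log=print):
    """Poll a task until it reaches 'success' or 'error'.

    get_task_info: callable returning an object with .state
    ('queued' | 'running' | 'success' | 'error'), .progress and .error.
    """
    while True:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise TaskFailed(str(info.error))
        # Corresponds to the periodic "progress is N%" DEBUG lines above.
        log("progress is %s%%" % (info.progress or 0))
        time.sleep(poll_interval)


class FakeTaskInfo:
    """Stand-in for a vCenter TaskInfo object, used only for this demo."""

    def __init__(self):
        self.state, self.progress, self.error = "running", 0, None

    def poll(self):
        self.progress += 50
        if self.progress >= 100:
            self.state, self.progress = "success", 100
        return self


if __name__ == "__main__":
    wait_for_vcenter_task(FakeTaskInfo().poll, poll_interval=0.01)
```

The same loop shape is why each "Invoking ... with opID=oslo.vmware-..." entry is followed, after a few polls, by a "completed successfully" entry carrying a duration_secs value measured around the whole wait.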
[ 962.025700] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b543ecdb-a381-45a1-9273-e74f3e13545c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.028609] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e508ef71-70ae-4c11-8c02-4d39065ae5f1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.039361] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76295575-1f53-439c-94a5-0d6836f84150 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.042838] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Waiting for the task: (returnval){ [ 962.042838] env[62585]: value = "task-1385019" [ 962.042838] env[62585]: _type = "Task" [ 962.042838] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.046210] env[62585]: INFO nova.scheduler.client.report [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Deleted allocations for instance 891e5a42-3681-47eb-ac88-015fa21a6580 [ 962.065050] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a8de1e-c4f7-483c-995f-df0f20f060e6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.070971] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385019, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.076274] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ceda8d2-ad31-41b5-88ba-908e4b39ab10 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.109218] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180163MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=62585) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 962.109447] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.120498] env[62585]: DEBUG oslo_vmware.api [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385017, 'name': PowerOffVM_Task, 'duration_secs': 0.593} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.120817] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 962.120913] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 962.121137] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a715462d-7600-4d27-b05c-e50376f0faf9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.154014] env[62585]: DEBUG nova.network.neutron [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Updating instance_info_cache with network_info: [{"id": "68e2a061-2351-4cbb-bc96-6898b8dac94e", "address": "fa:16:3e:db:24:69", "network": {"id": "f73c6c58-29b8-4fb6-a001-94a77e4e6a53", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1579050178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f00751679b29472e9ab92c9e48a99925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68e2a061-23", "ovs_interfaceid": "68e2a061-2351-4cbb-bc96-6898b8dac94e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.205040] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 962.205040] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 962.205040] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleting the datastore file 
[datastore1] c8f23f36-b035-467e-959a-37fc0b6462ad {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 962.205040] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6578935d-926d-4d49-986f-de13ebec9864 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.211579] env[62585]: DEBUG oslo_vmware.api [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 962.211579] env[62585]: value = "task-1385021" [ 962.211579] env[62585]: _type = "Task" [ 962.211579] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.219740] env[62585]: DEBUG oslo_vmware.api [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385021, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.278775] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385018, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.557999] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385019, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072367} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.558333] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 962.559235] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a792d5-f80c-4628-a04b-7e934882b911 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.583664] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 65ed4088-2cc5-4c00-94af-f714ec608fd8/65ed4088-2cc5-4c00-94af-f714ec608fd8.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 962.584270] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7749a114-9c4c-4391-92c9-097e601ada47 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "891e5a42-3681-47eb-ac88-015fa21a6580" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.038s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.585404] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8064c290-f1fa-408d-af98-2d3d3a40d9ec {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.606215] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Waiting for the task: (returnval){ [ 962.606215] env[62585]: value = "task-1385022" [ 962.606215] env[62585]: _type = "Task" [ 962.606215] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.616377] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385022, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.655983] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Releasing lock "refresh_cache-b0885bdd-bc8d-4311-8388-54bdc22144c2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.656357] env[62585]: DEBUG nova.compute.manager [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Instance network_info: |[{"id": "68e2a061-2351-4cbb-bc96-6898b8dac94e", "address": "fa:16:3e:db:24:69", "network": {"id": "f73c6c58-29b8-4fb6-a001-94a77e4e6a53", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1579050178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f00751679b29472e9ab92c9e48a99925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68e2a061-23", "ovs_interfaceid": "68e2a061-2351-4cbb-bc96-6898b8dac94e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 962.656902] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:24:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68e2a061-2351-4cbb-bc96-6898b8dac94e', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 962.666057] env[62585]: DEBUG oslo.service.loopingcall [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 962.666339] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 962.666940] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-039a79ad-9ebe-4721-8414-43410024b6ce {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.690222] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 962.690222] env[62585]: value = "task-1385023" [ 962.690222] env[62585]: _type = "Task" [ 962.690222] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.697762] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385023, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.721918] env[62585]: DEBUG oslo_vmware.api [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385021, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144378} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.722198] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 962.722391] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 962.723256] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 962.723256] env[62585]: INFO nova.compute.manager [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Took 1.63 seconds to destroy the instance on the hypervisor. [ 962.723256] env[62585]: DEBUG oslo.service.loopingcall [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 962.723256] env[62585]: DEBUG nova.compute.manager [-] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 962.723256] env[62585]: DEBUG nova.network.neutron [-] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 962.780417] env[62585]: DEBUG oslo_vmware.api [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385018, 'name': PowerOnVM_Task, 'duration_secs': 0.774197} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.780709] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 962.780924] env[62585]: INFO nova.compute.manager [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Took 8.48 seconds to spawn the instance on the hypervisor. [ 962.781123] env[62585]: DEBUG nova.compute.manager [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 962.781955] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a1921e-1e7d-4fbc-b82a-15d7e1e0ed04 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.850558] env[62585]: DEBUG nova.compute.manager [req-491e2f87-e48b-4dfd-9023-b9348e965926 req-891dc689-acf5-4cb2-8544-d11cad8e1add service nova] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Received event network-changed-68e2a061-2351-4cbb-bc96-6898b8dac94e {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 962.850827] env[62585]: DEBUG nova.compute.manager [req-491e2f87-e48b-4dfd-9023-b9348e965926 req-891dc689-acf5-4cb2-8544-d11cad8e1add service nova] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Refreshing instance network info cache due to event network-changed-68e2a061-2351-4cbb-bc96-6898b8dac94e. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 962.851169] env[62585]: DEBUG oslo_concurrency.lockutils [req-491e2f87-e48b-4dfd-9023-b9348e965926 req-891dc689-acf5-4cb2-8544-d11cad8e1add service nova] Acquiring lock "refresh_cache-b0885bdd-bc8d-4311-8388-54bdc22144c2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.851241] env[62585]: DEBUG oslo_concurrency.lockutils [req-491e2f87-e48b-4dfd-9023-b9348e965926 req-891dc689-acf5-4cb2-8544-d11cad8e1add service nova] Acquired lock "refresh_cache-b0885bdd-bc8d-4311-8388-54bdc22144c2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.851370] env[62585]: DEBUG nova.network.neutron [req-491e2f87-e48b-4dfd-9023-b9348e965926 req-891dc689-acf5-4cb2-8544-d11cad8e1add service nova] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Refreshing network info cache for port 68e2a061-2351-4cbb-bc96-6898b8dac94e {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 963.116985] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385022, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.171636] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efc4bf5-d9b4-4237-bb16-b2fcc2e5ba1c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.179425] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d8cbf9-70ed-4363-ab53-81dfe1ea356a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.214278] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55ad37d-b506-4b2f-8a8f-8f5ad05ef244 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.225985] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed32e15-cd00-4c75-ab55-e57b1f2fba81 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.231242] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385023, 'name': CreateVM_Task, 'duration_secs': 0.344523} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.232278] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 963.233529] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.233707] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.234785] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 963.234785] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc3b7fb3-37f4-46ae-b28c-126640bc2438 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.244372] env[62585]: DEBUG nova.compute.provider_tree [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.251072] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 963.251072] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]520beaaa-4d25-d7e3-5e3c-ab0ded4e42bc" [ 963.251072] env[62585]: _type = "Task" [ 963.251072] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.259795] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]520beaaa-4d25-d7e3-5e3c-ab0ded4e42bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.297715] env[62585]: INFO nova.compute.manager [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Took 19.77 seconds to build instance. 
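The ExtendVirtualDisk_Task, CreateVM_Task and SearchDatastore_Task entries above all follow the same oslo.vmware pattern: a vSphere method that returns a Task managed object is invoked, and wait_for_task() then polls it (the "progress is N%" lines come from _poll_task) until it reaches success or raises on error. A minimal sketch of that pattern is below; the vCenter host, credentials, retry/poll values and the VM managed-object id are placeholders, not values taken from this log.

    # Illustrative sketch of the oslo.vmware task-polling pattern seen above.
    # Host, credentials and the "vm-12345" moref are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',          # placeholder vCenter host
        'administrator@vsphere.local',  # placeholder username
        'secret',                       # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5,
    )

    # Build a managed object reference for an (illustrative) VM and invoke a
    # vSphere method that returns a Task, as PowerOffVM_Task does in this log.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

    # Polls the task until it reaches "success" (returning the task info);
    # raises an oslo_vmware exception if the task ends in "error".
    task_info = session.wait_for_task(task)
    print(task_info.state)

The "Waiting for the task: (returnval){ value = "task-NNNNNNN" ... }" blocks in the log are simply the Task moref being logged before this polling loop starts.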
[ 963.532574] env[62585]: DEBUG nova.network.neutron [-] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.617663] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385022, 'name': ReconfigVM_Task, 'duration_secs': 0.831256} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.617951] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 65ed4088-2cc5-4c00-94af-f714ec608fd8/65ed4088-2cc5-4c00-94af-f714ec608fd8.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 963.618746] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9b3bcbbd-062b-4135-a92b-3518764e4dac {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.621571] env[62585]: DEBUG nova.network.neutron [req-491e2f87-e48b-4dfd-9023-b9348e965926 req-891dc689-acf5-4cb2-8544-d11cad8e1add service nova] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Updated VIF entry in instance network info cache for port 68e2a061-2351-4cbb-bc96-6898b8dac94e. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 963.621939] env[62585]: DEBUG nova.network.neutron [req-491e2f87-e48b-4dfd-9023-b9348e965926 req-891dc689-acf5-4cb2-8544-d11cad8e1add service nova] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Updating instance_info_cache with network_info: [{"id": "68e2a061-2351-4cbb-bc96-6898b8dac94e", "address": "fa:16:3e:db:24:69", "network": {"id": "f73c6c58-29b8-4fb6-a001-94a77e4e6a53", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1579050178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f00751679b29472e9ab92c9e48a99925", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68e2a061-23", "ovs_interfaceid": "68e2a061-2351-4cbb-bc96-6898b8dac94e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.630186] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Waiting for the task: (returnval){ [ 
963.630186] env[62585]: value = "task-1385024" [ 963.630186] env[62585]: _type = "Task" [ 963.630186] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.639776] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385024, 'name': Rename_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.740922] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ddae4f-961c-4cfe-b762-c6a31eed7bcd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.748193] env[62585]: DEBUG nova.scheduler.client.report [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 963.752380] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd5ff2a0-517f-40f7-b0ef-a1fbf57d8994 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Suspending the VM {{(pid=62585) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 963.752720] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-a34b11cf-3e15-4642-9596-4cc465e4913b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.768674] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]520beaaa-4d25-d7e3-5e3c-ab0ded4e42bc, 'name': SearchDatastore_Task, 'duration_secs': 0.009627} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.768674] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.768674] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 963.768902] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.769030] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.769323] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 963.769824] env[62585]: DEBUG oslo_vmware.api [None req-bd5ff2a0-517f-40f7-b0ef-a1fbf57d8994 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 963.769824] env[62585]: value = "task-1385025" [ 963.769824] env[62585]: _type = "Task" [ 963.769824] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.771411] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4a509ea-e34d-4f03-aa94-e8eb7066294d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.780542] env[62585]: DEBUG oslo_vmware.api [None req-bd5ff2a0-517f-40f7-b0ef-a1fbf57d8994 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385025, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.783391] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 963.783391] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 963.783391] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c661528d-3a25-4364-a9d1-ea622d85e005 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.788597] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 963.788597] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e6f0dc-e319-f581-bca7-a95bcb696a69" [ 963.788597] env[62585]: _type = "Task" [ 963.788597] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.800467] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5b9f2879-c6e8-4661-aea6-c6dcd90f4b50 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "a26fb190-e6e6-48ab-a1d6-c662421a965f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.283s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.800467] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e6f0dc-e319-f581-bca7-a95bcb696a69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.035630] env[62585]: INFO nova.compute.manager [-] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Took 1.31 seconds to deallocate network for instance. [ 964.127291] env[62585]: DEBUG oslo_concurrency.lockutils [req-491e2f87-e48b-4dfd-9023-b9348e965926 req-891dc689-acf5-4cb2-8544-d11cad8e1add service nova] Releasing lock "refresh_cache-b0885bdd-bc8d-4311-8388-54bdc22144c2" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.141504] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385024, 'name': Rename_Task, 'duration_secs': 0.198634} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.142269] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 964.142438] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb16a763-fdac-4918-8a00-9f5acb9a2f7a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.148466] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Waiting for the task: (returnval){ [ 964.148466] env[62585]: value = "task-1385026" [ 964.148466] env[62585]: _type = "Task" [ 964.148466] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.156711] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385026, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.256108] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.236s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.257041] env[62585]: DEBUG nova.compute.manager [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 964.259206] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.150s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.282372] env[62585]: DEBUG oslo_vmware.api [None req-bd5ff2a0-517f-40f7-b0ef-a1fbf57d8994 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385025, 'name': SuspendVM_Task} progress is 62%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.301073] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e6f0dc-e319-f581-bca7-a95bcb696a69, 'name': SearchDatastore_Task, 'duration_secs': 0.009704} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.302104] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1614a128-a1d5-49fc-aef6-d60e7831b5d5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.307518] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 964.307518] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]520a61e6-0e9c-3500-3347-66b0d54c7d02" [ 964.307518] env[62585]: _type = "Task" [ 964.307518] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.316309] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]520a61e6-0e9c-3500-3347-66b0d54c7d02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.542969] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.659313] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385026, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.762524] env[62585]: DEBUG nova.compute.utils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 964.764292] env[62585]: DEBUG nova.compute.manager [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 964.764463] env[62585]: DEBUG nova.network.neutron [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 964.782787] env[62585]: DEBUG oslo_vmware.api [None req-bd5ff2a0-517f-40f7-b0ef-a1fbf57d8994 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385025, 'name': SuspendVM_Task, 'duration_secs': 0.831065} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.783054] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd5ff2a0-517f-40f7-b0ef-a1fbf57d8994 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Suspended the VM {{(pid=62585) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 964.783240] env[62585]: DEBUG nova.compute.manager [None req-bd5ff2a0-517f-40f7-b0ef-a1fbf57d8994 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 964.784012] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf0e63a-92b2-4fad-a510-e1a34695dd15 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.815324] env[62585]: DEBUG nova.policy [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73ef782ef63e424195872ee2cf9928b0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca61db1ff3fb4f5cae3dc18e70af8ba7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 964.822847] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]520a61e6-0e9c-3500-3347-66b0d54c7d02, 'name': SearchDatastore_Task, 'duration_secs': 0.012067} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.823173] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.823448] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] b0885bdd-bc8d-4311-8388-54bdc22144c2/b0885bdd-bc8d-4311-8388-54bdc22144c2.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 964.823831] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-349b4089-37f2-4efa-8d48-78f71a52b829 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.830916] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 964.830916] env[62585]: value = "task-1385027" [ 964.830916] env[62585]: _type = "Task" [ 964.830916] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.840793] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385027, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.889815] env[62585]: DEBUG nova.compute.manager [req-92467165-99b6-4570-a449-2fa57d1a8214 req-86376bcf-31b6-412a-a1e5-13a2d155bf56 service nova] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Received event network-vif-deleted-ad5d5eb5-2914-4674-81d4-e271a39d2b67 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 965.089083] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "6057e13b-71df-458d-b6ed-c139a8c57836" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.089346] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "6057e13b-71df-458d-b6ed-c139a8c57836" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.089577] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "6057e13b-71df-458d-b6ed-c139a8c57836-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.089769] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "6057e13b-71df-458d-b6ed-c139a8c57836-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.090047] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "6057e13b-71df-458d-b6ed-c139a8c57836-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.092467] env[62585]: INFO nova.compute.manager [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Terminating instance [ 965.095284] env[62585]: DEBUG nova.compute.manager [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 965.095491] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 965.096431] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46685503-a833-4add-948d-4bddd39a5c59 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.107616] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.108397] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1128dda6-6dc8-4f37-8fe6-be88c4eab8d5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.117219] env[62585]: DEBUG oslo_vmware.api [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 965.117219] env[62585]: value = "task-1385028" [ 965.117219] env[62585]: _type = "Task" [ 965.117219] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.128433] env[62585]: DEBUG oslo_vmware.api [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1385028, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.161057] env[62585]: DEBUG oslo_vmware.api [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385026, 'name': PowerOnVM_Task, 'duration_secs': 0.857699} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.161364] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 965.161572] env[62585]: INFO nova.compute.manager [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Took 6.90 seconds to spawn the instance on the hypervisor. 
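The "Acquiring lock ... by ...", "acquired ... :: waited N.NNNs" and ""released" ... :: held N.NNNs" entries throughout this section come from oslo.concurrency's lockutils wrapper, which records how long each caller waited for and then held a named lock (for example "compute_resources" or the per-instance UUID locks above). A minimal sketch of the two usual entry points, with the lock name and function bodies purely illustrative:

    # Illustrative use of oslo.concurrency locking as reflected in the
    # acquire/waited/released/held entries above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Serialized against any other caller in this process that uses
        # the same lock name; the wrapper logs the waited/held durations.
        pass

    def claim_resources():
        # Equivalent context-manager form.
        with lockutils.lock('compute_resources'):
            pass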
[ 965.161757] env[62585]: DEBUG nova.compute.manager [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 965.162629] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d801eeb-a47f-4a00-9966-49521e734b27 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.271977] env[62585]: DEBUG nova.compute.manager [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 965.301567] env[62585]: DEBUG nova.network.neutron [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Successfully created port: d006570b-3bb8-443a-8eb9-f4d5dcc7c366 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 965.311082] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 6057e13b-71df-458d-b6ed-c139a8c57836 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 965.311294] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance e4edc1dd-52ea-428e-832a-b49d3bc4fe14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 965.311507] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance abf4a205-fcee-46e4-85b6-10a452cc0312 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 965.311598] env[62585]: WARNING nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance c8f23f36-b035-467e-959a-37fc0b6462ad is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 965.311693] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance a26fb190-e6e6-48ab-a1d6-c662421a965f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 965.311845] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 65ed4088-2cc5-4c00-94af-f714ec608fd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 965.311932] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance b0885bdd-bc8d-4311-8388-54bdc22144c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 965.312131] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance b7b8338a-2e9f-4854-8f4d-ede21b150317 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 965.312273] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 965.312464] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 965.345556] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385027, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478162} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.345877] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] b0885bdd-bc8d-4311-8388-54bdc22144c2/b0885bdd-bc8d-4311-8388-54bdc22144c2.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 965.346129] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 965.346423] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c5b7edb-8e05-473e-9a3e-67713ce713a0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.357067] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 965.357067] env[62585]: value = "task-1385029" [ 965.357067] env[62585]: _type = "Task" [ 965.357067] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.369910] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385029, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.466968] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338315b9-4d02-4da4-8bdb-230fd881d2da {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.475764] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efa38d9-176e-48d6-a6ab-ccf1f83f9e85 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.508514] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c8d08e-d3d9-4ffa-a274-e6ab4573324c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.516674] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89411141-1ea1-45a5-aeb3-4ade239a5ffa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.530401] env[62585]: DEBUG nova.compute.provider_tree [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.632192] env[62585]: DEBUG oslo_vmware.api [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1385028, 'name': PowerOffVM_Task, 'duration_secs': 0.287167} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.632774] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.633056] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.633490] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00c0b299-fdf2-43cb-916b-c784056720f2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.682760] env[62585]: INFO nova.compute.manager [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Took 16.57 seconds to build instance. 
[ 965.722025] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 965.722025] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 965.722025] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Deleting the datastore file [datastore2] 6057e13b-71df-458d-b6ed-c139a8c57836 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 965.722025] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a9ec2a1b-0a7a-4161-97bd-d87051c1c982 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.729395] env[62585]: DEBUG oslo_vmware.api [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for the task: (returnval){ [ 965.729395] env[62585]: value = "task-1385031" [ 965.729395] env[62585]: _type = "Task" [ 965.729395] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.751756] env[62585]: DEBUG oslo_vmware.api [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1385031, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.868840] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385029, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07073} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.869322] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 965.870852] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cf26cc-078b-46e2-a40a-19739304f492 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.903742] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] b0885bdd-bc8d-4311-8388-54bdc22144c2/b0885bdd-bc8d-4311-8388-54bdc22144c2.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 965.904082] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4dd669e-0275-403d-9bdc-b96b01a0d6c0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.929016] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 965.929016] env[62585]: value = "task-1385032" [ 965.929016] env[62585]: _type = "Task" [ 965.929016] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.938690] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385032, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.033201] env[62585]: DEBUG nova.scheduler.client.report [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 966.185610] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d700ab01-1f82-4c3f-a7fc-e1503635a0de tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Lock "65ed4088-2cc5-4c00-94af-f714ec608fd8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.077s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.241235] env[62585]: DEBUG oslo_vmware.api [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Task: {'id': task-1385031, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136711} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.241580] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.241789] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.241964] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.242372] env[62585]: INFO nova.compute.manager [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Took 1.15 seconds to destroy the instance on the hypervisor. [ 966.242642] env[62585]: DEBUG oslo.service.loopingcall [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 966.242847] env[62585]: DEBUG nova.compute.manager [-] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 966.242946] env[62585]: DEBUG nova.network.neutron [-] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.245403] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "a26fb190-e6e6-48ab-a1d6-c662421a965f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.245666] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "a26fb190-e6e6-48ab-a1d6-c662421a965f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.246199] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "a26fb190-e6e6-48ab-a1d6-c662421a965f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.246408] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "a26fb190-e6e6-48ab-a1d6-c662421a965f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.246583] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "a26fb190-e6e6-48ab-a1d6-c662421a965f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.249623] env[62585]: INFO nova.compute.manager [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Terminating instance [ 966.251589] env[62585]: DEBUG nova.compute.manager [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 966.251785] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 966.252928] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a55160f-011d-4868-8f20-19b2bce257ab {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.263463] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 966.263558] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc036517-8d09-45c5-aeb0-48fa3501a9f2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.286430] env[62585]: DEBUG nova.compute.manager [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 966.319717] env[62585]: DEBUG nova.virt.hardware [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 966.322258] env[62585]: DEBUG nova.virt.hardware [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 966.322258] env[62585]: DEBUG nova.virt.hardware [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 966.322483] env[62585]: DEBUG nova.virt.hardware [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 
tempest-ServerRescueTestJSON-445206306-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 966.322688] env[62585]: DEBUG nova.virt.hardware [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 966.322889] env[62585]: DEBUG nova.virt.hardware [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 966.323559] env[62585]: DEBUG nova.virt.hardware [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 966.323559] env[62585]: DEBUG nova.virt.hardware [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 966.323967] env[62585]: DEBUG nova.virt.hardware [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 966.324045] env[62585]: DEBUG nova.virt.hardware [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 966.324275] env[62585]: DEBUG nova.virt.hardware [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 966.325536] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170fe91d-85a6-4e2d-b67c-126c797d3cc3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.332165] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 966.332428] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 966.332641] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Deleting the datastore file [datastore1] a26fb190-e6e6-48ab-a1d6-c662421a965f {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 966.335556] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da82c197-aae4-4417-b2eb-89745cd96c56 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.339718] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6306fe4a-41fe-43e2-bec7-bcaa2687d24f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.357218] env[62585]: DEBUG oslo_vmware.api [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 966.357218] env[62585]: value = "task-1385034" [ 966.357218] env[62585]: _type = "Task" [ 966.357218] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.366383] env[62585]: DEBUG oslo_vmware.api [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385034, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.444302] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385032, 'name': ReconfigVM_Task, 'duration_secs': 0.273382} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.444302] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Reconfigured VM instance instance-0000005f to attach disk [datastore2] b0885bdd-bc8d-4311-8388-54bdc22144c2/b0885bdd-bc8d-4311-8388-54bdc22144c2.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.444302] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64e599ad-8f44-4016-aa45-5b383697a28d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.452347] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 966.452347] env[62585]: value = "task-1385035" [ 966.452347] env[62585]: _type = "Task" [ 966.452347] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.462519] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385035, 'name': Rename_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.539641] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62585) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 966.539959] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.281s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.540328] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.998s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.540556] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.571347] env[62585]: INFO nova.scheduler.client.report [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleted allocations for instance c8f23f36-b035-467e-959a-37fc0b6462ad [ 966.868688] env[62585]: DEBUG oslo_vmware.api [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385034, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187822} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.868974] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.869185] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.869378] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.869553] env[62585]: INFO nova.compute.manager [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Took 0.62 seconds to destroy the instance on the hypervisor. [ 966.869796] env[62585]: DEBUG oslo.service.loopingcall [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 966.869999] env[62585]: DEBUG nova.compute.manager [-] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 966.870111] env[62585]: DEBUG nova.network.neutron [-] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.963758] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385035, 'name': Rename_Task, 'duration_secs': 0.167279} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.964167] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 966.964441] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ff597246-3d1b-415f-a82c-a774655dcdb7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.973905] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 966.973905] env[62585]: value = "task-1385036" [ 966.973905] env[62585]: _type = "Task" [ 966.973905] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.988839] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385036, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.079060] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5bac68cc-d780-4d03-8e53-2da6e7dda089 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "c8f23f36-b035-467e-959a-37fc0b6462ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.995s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.300451] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "abf4a205-fcee-46e4-85b6-10a452cc0312" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.300451] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "abf4a205-fcee-46e4-85b6-10a452cc0312" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.300451] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "abf4a205-fcee-46e4-85b6-10a452cc0312-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.300917] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock 
"abf4a205-fcee-46e4-85b6-10a452cc0312-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.300917] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "abf4a205-fcee-46e4-85b6-10a452cc0312-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.306430] env[62585]: INFO nova.compute.manager [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Terminating instance [ 967.308631] env[62585]: DEBUG nova.compute.manager [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 967.310008] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 967.310651] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3545bb-60d9-4b91-a994-43d97db9b490 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.322409] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 967.322859] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87989686-4cb7-4b6d-a635-65ab856872d0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.330323] env[62585]: DEBUG nova.compute.manager [None req-59dbbe06-5239-44ae-88d0-e832636e8018 tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 967.330945] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53acac47-51fd-4f81-958d-1b277b1d1cb3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.337766] env[62585]: DEBUG oslo_vmware.api [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 967.337766] env[62585]: value = "task-1385037" [ 967.337766] env[62585]: _type = "Task" 
[ 967.337766] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.353578] env[62585]: DEBUG oslo_vmware.api [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1385037, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.405413] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Acquiring lock "65ed4088-2cc5-4c00-94af-f714ec608fd8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.405794] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Lock "65ed4088-2cc5-4c00-94af-f714ec608fd8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.406116] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Acquiring lock "65ed4088-2cc5-4c00-94af-f714ec608fd8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.406706] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Lock "65ed4088-2cc5-4c00-94af-f714ec608fd8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.407131] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Lock "65ed4088-2cc5-4c00-94af-f714ec608fd8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.412685] env[62585]: DEBUG nova.compute.manager [req-587e9a27-f99e-430b-8234-710a080ecc8a req-0ad2a438-d158-48b6-bfcb-2db3daaec987 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Received event network-vif-deleted-bc3d19ab-ba98-4935-9e08-61c5df21be43 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 967.412907] env[62585]: INFO nova.compute.manager [req-587e9a27-f99e-430b-8234-710a080ecc8a req-0ad2a438-d158-48b6-bfcb-2db3daaec987 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Neutron deleted interface bc3d19ab-ba98-4935-9e08-61c5df21be43; detaching it from the instance and deleting it from the info cache [ 967.413241] env[62585]: DEBUG nova.network.neutron 
[req-587e9a27-f99e-430b-8234-710a080ecc8a req-0ad2a438-d158-48b6-bfcb-2db3daaec987 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.415443] env[62585]: INFO nova.compute.manager [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Terminating instance [ 967.421139] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Acquiring lock "refresh_cache-65ed4088-2cc5-4c00-94af-f714ec608fd8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 967.421450] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Acquired lock "refresh_cache-65ed4088-2cc5-4c00-94af-f714ec608fd8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.421701] env[62585]: DEBUG nova.network.neutron [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 967.488379] env[62585]: DEBUG oslo_vmware.api [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385036, 'name': PowerOnVM_Task, 'duration_secs': 0.464755} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.488486] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 967.488730] env[62585]: INFO nova.compute.manager [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Took 6.94 seconds to spawn the instance on the hypervisor. 
[ 967.488917] env[62585]: DEBUG nova.compute.manager [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 967.489772] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1de8ec-8d5b-4b8d-9d13-ccb3cd4fb28f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.847105] env[62585]: DEBUG nova.network.neutron [-] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.848315] env[62585]: DEBUG oslo_vmware.api [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1385037, 'name': PowerOffVM_Task, 'duration_secs': 0.293754} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.849020] env[62585]: INFO nova.compute.manager [None req-59dbbe06-5239-44ae-88d0-e832636e8018 tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] instance snapshotting [ 967.849642] env[62585]: DEBUG nova.objects.instance [None req-59dbbe06-5239-44ae-88d0-e832636e8018 tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Lazy-loading 'flavor' on Instance uuid 65ed4088-2cc5-4c00-94af-f714ec608fd8 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 967.851051] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 967.851279] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 967.851881] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1cf4fa2a-8d73-41c9-81d8-9cbd02d3f6f1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.869947] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "e4edc1dd-52ea-428e-832a-b49d3bc4fe14" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.870248] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "e4edc1dd-52ea-428e-832a-b49d3bc4fe14" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.870570] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "e4edc1dd-52ea-428e-832a-b49d3bc4fe14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.870657] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "e4edc1dd-52ea-428e-832a-b49d3bc4fe14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.870902] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "e4edc1dd-52ea-428e-832a-b49d3bc4fe14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.873779] env[62585]: INFO nova.compute.manager [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Terminating instance [ 967.875732] env[62585]: DEBUG nova.compute.manager [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 967.876069] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 967.877685] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473dbaa3-2ee4-4805-b8ee-e042e8e54982 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.887651] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 967.887944] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f76b9cb6-e7ce-4327-aefe-72fb13faa183 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.896100] env[62585]: DEBUG oslo_vmware.api [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 967.896100] env[62585]: value = "task-1385039" [ 967.896100] env[62585]: _type = "Task" [ 967.896100] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.908038] env[62585]: DEBUG oslo_vmware.api [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385039, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.919506] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d2b8462-42b2-4045-9542-f9ab9268fdc1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.922338] env[62585]: DEBUG nova.network.neutron [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Successfully updated port: d006570b-3bb8-443a-8eb9-f4d5dcc7c366 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 967.926886] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 967.926886] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 967.926886] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Deleting the datastore file [datastore1] abf4a205-fcee-46e4-85b6-10a452cc0312 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 967.926886] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ff91fc7-926c-41e1-806d-f17be355d2c8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.934890] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b71d61-4284-4ae4-9a02-5728eaea0037 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.947907] env[62585]: DEBUG oslo_vmware.api [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for the task: (returnval){ [ 967.947907] env[62585]: value = "task-1385040" [ 967.947907] env[62585]: _type = "Task" [ 967.947907] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.948770] env[62585]: DEBUG nova.network.neutron [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 967.963040] env[62585]: DEBUG oslo_vmware.api [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1385040, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.969836] env[62585]: DEBUG nova.compute.manager [req-587e9a27-f99e-430b-8234-710a080ecc8a req-0ad2a438-d158-48b6-bfcb-2db3daaec987 service nova] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Detach interface failed, port_id=bc3d19ab-ba98-4935-9e08-61c5df21be43, reason: Instance 6057e13b-71df-458d-b6ed-c139a8c57836 could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 968.010444] env[62585]: INFO nova.compute.manager [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Took 17.01 seconds to build instance. [ 968.012107] env[62585]: DEBUG nova.network.neutron [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.261816] env[62585]: DEBUG nova.network.neutron [-] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.352726] env[62585]: INFO nova.compute.manager [-] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Took 2.11 seconds to deallocate network for instance. [ 968.361872] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8917c828-bab8-4c9b-9e5d-04c50e31af47 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.396255] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37278348-a466-44ea-abca-47c7eab48ff2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.415124] env[62585]: DEBUG oslo_vmware.api [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385039, 'name': PowerOffVM_Task, 'duration_secs': 0.231098} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.415905] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 968.416059] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 968.416339] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f03e4be-fae8-4950-8254-e5ee30e3c87b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.426954] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "refresh_cache-b7b8338a-2e9f-4854-8f4d-ede21b150317" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.431135] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquired lock "refresh_cache-b7b8338a-2e9f-4854-8f4d-ede21b150317" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.431135] env[62585]: DEBUG nova.network.neutron [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 968.462151] env[62585]: DEBUG oslo_vmware.api [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Task: {'id': task-1385040, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.278159} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.462979] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 968.463170] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 968.463346] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 968.463513] env[62585]: INFO nova.compute.manager [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Took 1.15 seconds to destroy the instance on the hypervisor. [ 968.463759] env[62585]: DEBUG oslo.service.loopingcall [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 968.463947] env[62585]: DEBUG nova.compute.manager [-] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 968.464051] env[62585]: DEBUG nova.network.neutron [-] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 968.508997] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 968.509250] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 968.509505] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleting the datastore file [datastore1] e4edc1dd-52ea-428e-832a-b49d3bc4fe14 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 968.509699] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e15c815-6dfe-4f67-a530-809741f88a27 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.515037] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b9f36dc4-8c62-4a01-b45a-64d94b2547f8 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "b0885bdd-bc8d-4311-8388-54bdc22144c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.524s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.515453] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Releasing lock "refresh_cache-65ed4088-2cc5-4c00-94af-f714ec608fd8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.515830] env[62585]: DEBUG nova.compute.manager [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 968.516036] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.518403] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3370bfae-3d6d-4b82-97ac-bb9b65009067 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.528072] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.531918] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7ae90ea-43d4-415e-8e41-9a2c1ec01754 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.534970] env[62585]: DEBUG oslo_vmware.api [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 968.534970] env[62585]: value = "task-1385042" [ 968.534970] env[62585]: _type = "Task" [ 968.534970] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.538952] env[62585]: DEBUG oslo_vmware.api [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Waiting for the task: (returnval){ [ 968.538952] env[62585]: value = "task-1385043" [ 968.538952] env[62585]: _type = "Task" [ 968.538952] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.547953] env[62585]: DEBUG oslo_vmware.api [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385042, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.555942] env[62585]: DEBUG oslo_vmware.api [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385043, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.763983] env[62585]: INFO nova.compute.manager [-] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Took 1.89 seconds to deallocate network for instance. 
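Annotation: the repeated wait_for_task / _poll_task records above (PowerOffVM_Task, DeleteDatastoreFile_Task and the like, first reported at "progress is 0%" and later "completed successfully" with a duration_secs) all trace the same poll-until-done pattern against vCenter tasks. The sketch below is a minimal, self-contained illustration of that pattern only; it does not reproduce oslo.vmware's real wait_for_task implementation, and the FakeTask class, its timings, and the poll_interval value are invented for the example.

import time

class FakeTask:
    """Stand-in for a vCenter task handle; hypothetical, for this sketch only."""

    def __init__(self, duration=0.3):
        self._deadline = time.monotonic() + duration

    def info(self):
        done = time.monotonic() >= self._deadline
        return {"state": "success" if done else "running",
                "progress": 100 if done else 0}

def wait_for_task(task, poll_interval=0.1):
    """Poll a task until it reaches a terminal state, mirroring the
    'progress is 0%' ... 'completed successfully' records in the log."""
    start = time.monotonic()
    while True:
        info = task.info()
        print("Task progress is %d%%" % info["progress"])
        if info["state"] == "success":
            return time.monotonic() - start  # analogous to duration_secs
        if info["state"] == "error":
            raise RuntimeError("task failed")
        time.sleep(poll_interval)

if __name__ == "__main__":
    print("completed successfully in %.3fs" % wait_for_task(FakeTask()))

Each completed task in the log reports its elapsed time the same way (e.g. PowerOffVM_Task duration_secs 0.231098 above).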
[ 968.875194] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.875517] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.875742] env[62585]: DEBUG nova.objects.instance [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lazy-loading 'resources' on Instance uuid 6057e13b-71df-458d-b6ed-c139a8c57836 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.919026] env[62585]: DEBUG nova.compute.manager [None req-59dbbe06-5239-44ae-88d0-e832636e8018 tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Instance disappeared during snapshot {{(pid=62585) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 968.963661] env[62585]: DEBUG nova.network.neutron [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 969.046933] env[62585]: DEBUG oslo_vmware.api [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385042, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.372316} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.047978] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.048382] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 969.048685] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.049077] env[62585]: INFO nova.compute.manager [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Took 1.17 seconds to destroy the instance on the hypervisor. [ 969.049457] env[62585]: DEBUG oslo.service.loopingcall [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 969.055146] env[62585]: DEBUG nova.compute.manager [-] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 969.055337] env[62585]: DEBUG nova.network.neutron [-] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 969.057593] env[62585]: DEBUG oslo_vmware.api [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385043, 'name': PowerOffVM_Task, 'duration_secs': 0.129387} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.058115] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 969.058502] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 969.059261] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d785ccdb-377e-4e0e-a312-53ef5b29f5b5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.074289] env[62585]: DEBUG nova.compute.manager [None req-59dbbe06-5239-44ae-88d0-e832636e8018 tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Found 0 images (rotation: 2) {{(pid=62585) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 969.109203] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 969.109436] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 969.109621] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Deleting the datastore file [datastore2] 65ed4088-2cc5-4c00-94af-f714ec608fd8 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.111075] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bcd0efac-9323-4edb-870c-365320d18ab9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.118292] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Acquiring lock "8d84e240-2dc3-4680-9ee7-b705d4e7749a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.118795] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Lock "8d84e240-2dc3-4680-9ee7-b705d4e7749a" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.125621] env[62585]: DEBUG oslo_vmware.api [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Waiting for the task: (returnval){ [ 969.125621] env[62585]: value = "task-1385045" [ 969.125621] env[62585]: _type = "Task" [ 969.125621] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.138580] env[62585]: DEBUG oslo_vmware.api [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385045, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.143543] env[62585]: DEBUG nova.network.neutron [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Updating instance_info_cache with network_info: [{"id": "d006570b-3bb8-443a-8eb9-f4d5dcc7c366", "address": "fa:16:3e:7a:5c:1a", "network": {"id": "bd25be93-26eb-4d34-b141-5264c9d0539a", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1411210261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ca61db1ff3fb4f5cae3dc18e70af8ba7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86b8f7fc-c105-4bcb-a4ec-c363ed38b17a", "external-id": "nsx-vlan-transportzone-830", "segmentation_id": 830, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd006570b-3b", "ovs_interfaceid": "d006570b-3bb8-443a-8eb9-f4d5dcc7c366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.235967] env[62585]: DEBUG nova.network.neutron [-] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.270516] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.451243] env[62585]: DEBUG nova.compute.manager [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Received event network-vif-plugged-d006570b-3bb8-443a-8eb9-f4d5dcc7c366 {{(pid=62585) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11131}} [ 969.451686] env[62585]: DEBUG oslo_concurrency.lockutils [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] Acquiring lock "b7b8338a-2e9f-4854-8f4d-ede21b150317-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.451936] env[62585]: DEBUG oslo_concurrency.lockutils [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] Lock "b7b8338a-2e9f-4854-8f4d-ede21b150317-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.452134] env[62585]: DEBUG oslo_concurrency.lockutils [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] Lock "b7b8338a-2e9f-4854-8f4d-ede21b150317-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.452332] env[62585]: DEBUG nova.compute.manager [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] No waiting events found dispatching network-vif-plugged-d006570b-3bb8-443a-8eb9-f4d5dcc7c366 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 969.452470] env[62585]: WARNING nova.compute.manager [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Received unexpected event network-vif-plugged-d006570b-3bb8-443a-8eb9-f4d5dcc7c366 for instance with vm_state building and task_state spawning. [ 969.452672] env[62585]: DEBUG nova.compute.manager [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Received event network-vif-deleted-fc94e060-472d-4599-81dc-e0fb45aaa2ce {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 969.452841] env[62585]: DEBUG nova.compute.manager [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Received event network-changed-d006570b-3bb8-443a-8eb9-f4d5dcc7c366 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 969.452964] env[62585]: DEBUG nova.compute.manager [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Refreshing instance network info cache due to event network-changed-d006570b-3bb8-443a-8eb9-f4d5dcc7c366. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 969.453177] env[62585]: DEBUG oslo_concurrency.lockutils [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] Acquiring lock "refresh_cache-b7b8338a-2e9f-4854-8f4d-ede21b150317" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.493290] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72531c8e-18fb-4fb6-bc22-6a1e12aa265a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.504087] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e48ef4c-3938-41c7-9e55-984db45625d7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.534062] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f091d3-cf48-494f-881a-1c06cb125c4d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.542148] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b045a8-166d-4165-a8c4-eac475bcb65c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.555945] env[62585]: DEBUG nova.compute.provider_tree [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.621522] env[62585]: DEBUG nova.compute.manager [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 969.636432] env[62585]: DEBUG oslo_vmware.api [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Task: {'id': task-1385045, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242491} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.636749] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.636973] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 969.637196] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.637408] env[62585]: INFO nova.compute.manager [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Took 1.12 seconds to destroy the instance on the hypervisor. [ 969.637713] env[62585]: DEBUG oslo.service.loopingcall [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 969.638026] env[62585]: DEBUG nova.compute.manager [-] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 969.638026] env[62585]: DEBUG nova.network.neutron [-] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 969.646565] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Releasing lock "refresh_cache-b7b8338a-2e9f-4854-8f4d-ede21b150317" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.646881] env[62585]: DEBUG nova.compute.manager [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Instance network_info: |[{"id": "d006570b-3bb8-443a-8eb9-f4d5dcc7c366", "address": "fa:16:3e:7a:5c:1a", "network": {"id": "bd25be93-26eb-4d34-b141-5264c9d0539a", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1411210261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ca61db1ff3fb4f5cae3dc18e70af8ba7", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86b8f7fc-c105-4bcb-a4ec-c363ed38b17a", "external-id": "nsx-vlan-transportzone-830", "segmentation_id": 830, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd006570b-3b", "ovs_interfaceid": "d006570b-3bb8-443a-8eb9-f4d5dcc7c366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 969.647183] env[62585]: DEBUG oslo_concurrency.lockutils [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] Acquired lock "refresh_cache-b7b8338a-2e9f-4854-8f4d-ede21b150317" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.647362] env[62585]: DEBUG nova.network.neutron [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Refreshing network info cache for port d006570b-3bb8-443a-8eb9-f4d5dcc7c366 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 969.648636] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:5c:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86b8f7fc-c105-4bcb-a4ec-c363ed38b17a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd006570b-3bb8-443a-8eb9-f4d5dcc7c366', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 969.656066] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Creating folder: Project (ca61db1ff3fb4f5cae3dc18e70af8ba7). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 969.657892] env[62585]: DEBUG nova.network.neutron [-] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 969.658937] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22d4f9f7-1f75-4d20-b8fa-db01c1987460 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.671015] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Created folder: Project (ca61db1ff3fb4f5cae3dc18e70af8ba7) in parent group-v293962. [ 969.671232] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Creating folder: Instances. Parent ref: group-v294059. 
{{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 969.671455] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c382d21-5ecb-49c2-9de1-80fd4e8120d1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.680966] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Created folder: Instances in parent group-v294059. [ 969.681221] env[62585]: DEBUG oslo.service.loopingcall [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 969.681467] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 969.681910] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc6c1105-c16b-4779-9d44-a5b3c7b260f0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.701156] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 969.701156] env[62585]: value = "task-1385048" [ 969.701156] env[62585]: _type = "Task" [ 969.701156] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.709209] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385048, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.739299] env[62585]: INFO nova.compute.manager [-] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Took 1.27 seconds to deallocate network for instance. 
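Annotation: the "Updating instance_info_cache with network_info: [...]" records above carry the full Neutron port view that Nova caches per instance. The handful of fields used later in the spawn path (port id, MAC, fixed IPs, MTU, bridge, device name) can be read straight out of that structure. The trimmed literal below mirrors the entry logged for port d006570b-3bb8-443a-8eb9-f4d5dcc7c366; the summarize_vif helper is an illustration only, not Nova's own network model code.

# Trimmed copy of one cached network_info entry, as logged above.
vif = {
    "id": "d006570b-3bb8-443a-8eb9-f4d5dcc7c366",
    "address": "fa:16:3e:7a:5c:1a",
    "devname": "tapd006570b-3b",
    "type": "ovs",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.6", "type": "fixed"}],
        }],
        "meta": {"mtu": 8950},
    },
}

def summarize_vif(vif):
    """Return the fields the spawn path cares about from a cached VIF entry."""
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "bridge": vif["network"]["bridge"],
        "fixed_ips": ips,
        "mtu": vif["network"]["meta"]["mtu"],
        "devname": vif["devname"],
    }

print(summarize_vif(vif))

The "Instance VIF info" record above for the same port shows the reduced form the VMware driver builds from this cache (mac_address, network_ref, iface_id, vif_model) before CreateVM_Task is invoked.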
[ 969.793564] env[62585]: DEBUG nova.compute.manager [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 969.794607] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce1e9b8-bb26-40c9-a245-e3d7bdd5b4d3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.813221] env[62585]: DEBUG nova.network.neutron [-] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.060134] env[62585]: DEBUG nova.scheduler.client.report [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 970.140472] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.161336] env[62585]: DEBUG nova.network.neutron [-] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.212237] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385048, 'name': CreateVM_Task, 'duration_secs': 0.376008} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.212369] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 970.213118] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.213336] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.213698] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 970.213911] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1f56e6c-d4df-4633-804d-7ea81751f49b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.219349] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 970.219349] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e3298d-5a62-cd19-c49e-c013e55af9d4" [ 970.219349] env[62585]: _type = "Task" [ 970.219349] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.228312] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e3298d-5a62-cd19-c49e-c013e55af9d4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.247790] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.308127] env[62585]: INFO nova.compute.manager [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] instance snapshotting [ 970.313590] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e717eac9-24a0-4418-b035-510f2c1a846e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.316185] env[62585]: INFO nova.compute.manager [-] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Took 1.26 seconds to deallocate network for instance. [ 970.335474] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7fb0a1-a885-4660-9cfd-b3550cd385a3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.383758] env[62585]: DEBUG nova.network.neutron [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Updated VIF entry in instance network info cache for port d006570b-3bb8-443a-8eb9-f4d5dcc7c366. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 970.384129] env[62585]: DEBUG nova.network.neutron [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Updating instance_info_cache with network_info: [{"id": "d006570b-3bb8-443a-8eb9-f4d5dcc7c366", "address": "fa:16:3e:7a:5c:1a", "network": {"id": "bd25be93-26eb-4d34-b141-5264c9d0539a", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1411210261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ca61db1ff3fb4f5cae3dc18e70af8ba7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86b8f7fc-c105-4bcb-a4ec-c363ed38b17a", "external-id": "nsx-vlan-transportzone-830", "segmentation_id": 830, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd006570b-3b", "ovs_interfaceid": "d006570b-3bb8-443a-8eb9-f4d5dcc7c366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.564587] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] 
Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.689s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.566950] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.296s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.567220] env[62585]: DEBUG nova.objects.instance [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lazy-loading 'resources' on Instance uuid a26fb190-e6e6-48ab-a1d6-c662421a965f {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.582647] env[62585]: INFO nova.scheduler.client.report [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Deleted allocations for instance 6057e13b-71df-458d-b6ed-c139a8c57836 [ 970.664496] env[62585]: INFO nova.compute.manager [-] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Took 1.03 seconds to deallocate network for instance. [ 970.732107] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e3298d-5a62-cd19-c49e-c013e55af9d4, 'name': SearchDatastore_Task, 'duration_secs': 0.014042} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.732439] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.732672] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 970.733088] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.733088] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.733241] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 970.733500] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-277aac8d-e9e5-4c42-ab5c-d2ce2ccb05e5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.744239] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 970.744421] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 970.745155] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5c147d8-d803-4e8f-87db-1a046633f64e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.750470] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 970.750470] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52242755-7d1f-d0c8-7a66-80cf35cd5450" [ 970.750470] env[62585]: _type = "Task" [ 970.750470] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.757806] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52242755-7d1f-d0c8-7a66-80cf35cd5450, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.835955] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.847058] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Creating Snapshot of the VM instance {{(pid=62585) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 970.847335] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-98758850-308a-49a6-952b-a8905ff05cc3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.856530] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 970.856530] env[62585]: value = "task-1385049" [ 970.856530] env[62585]: _type = "Task" [ 970.856530] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.864908] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385049, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.886528] env[62585]: DEBUG oslo_concurrency.lockutils [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] Releasing lock "refresh_cache-b7b8338a-2e9f-4854-8f4d-ede21b150317" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.886778] env[62585]: DEBUG nova.compute.manager [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Received event network-vif-deleted-c32c8966-edf9-44a6-9263-00c85e124ab0 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 970.886968] env[62585]: DEBUG nova.compute.manager [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Received event network-vif-deleted-6b248ce1-f858-4267-9e57-0d5110b02dc5 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 970.887182] env[62585]: INFO nova.compute.manager [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Neutron deleted interface 6b248ce1-f858-4267-9e57-0d5110b02dc5; detaching it from the instance and deleting it from the info cache [ 970.887377] env[62585]: DEBUG nova.network.neutron [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.091228] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e1fe854f-0877-4e06-a58f-fc05f1bab5e4 tempest-AttachVolumeShelveTestJSON-1622162023 tempest-AttachVolumeShelveTestJSON-1622162023-project-member] Lock "6057e13b-71df-458d-b6ed-c139a8c57836" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.002s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.170950] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50be052-b46d-409e-a92e-90f3210ed730 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.174861] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.180641] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe6b768-cf3e-45d1-b668-1b392120ded0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.211499] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76cb03f-2554-43b5-9dbe-1288742f3946 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.220310] env[62585]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e227d4-e457-4853-9a35-b9aa28369819 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.235121] env[62585]: DEBUG nova.compute.provider_tree [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.261338] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52242755-7d1f-d0c8-7a66-80cf35cd5450, 'name': SearchDatastore_Task, 'duration_secs': 0.043462} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.262144] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c72be69-e56a-45a8-8f96-0aa07d6059f5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.267806] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 971.267806] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526a7ae6-7d21-2c71-aec4-21e69c40f2c7" [ 971.267806] env[62585]: _type = "Task" [ 971.267806] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.275975] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526a7ae6-7d21-2c71-aec4-21e69c40f2c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.369683] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385049, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.389955] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-052a1f08-6c35-44a8-bb8d-f3f522b91587 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.401449] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3124be-1fab-4470-be58-494bd170c95c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.426728] env[62585]: DEBUG nova.compute.manager [req-8fb15050-32e4-40c6-b589-b974cb4613f8 req-7ec57c48-3531-4382-bfb3-503faec727c1 service nova] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Detach interface failed, port_id=6b248ce1-f858-4267-9e57-0d5110b02dc5, reason: Instance e4edc1dd-52ea-428e-832a-b49d3bc4fe14 could not be found. 
{{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 971.738122] env[62585]: DEBUG nova.scheduler.client.report [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 971.778711] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526a7ae6-7d21-2c71-aec4-21e69c40f2c7, 'name': SearchDatastore_Task, 'duration_secs': 0.012252} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.779025] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.779294] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] b7b8338a-2e9f-4854-8f4d-ede21b150317/b7b8338a-2e9f-4854-8f4d-ede21b150317.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 971.779557] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-60e017a2-37cc-4c3f-bf29-12c2ecaaa8b8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.786713] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 971.786713] env[62585]: value = "task-1385050" [ 971.786713] env[62585]: _type = "Task" [ 971.786713] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.795030] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385050, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.868773] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385049, 'name': CreateSnapshot_Task, 'duration_secs': 0.643576} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.869154] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Created Snapshot of the VM instance {{(pid=62585) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 971.869948] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-502f694c-c687-4297-af0b-e26310443c84 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.243652] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.677s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.246482] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.106s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.248396] env[62585]: INFO nova.compute.claims [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 972.267713] env[62585]: INFO nova.scheduler.client.report [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Deleted allocations for instance a26fb190-e6e6-48ab-a1d6-c662421a965f [ 972.297337] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385050, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.388968] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Creating linked-clone VM from snapshot {{(pid=62585) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 972.388968] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f4ef4de7-daec-4baa-9c53-1303b65afde1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.399227] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 972.399227] env[62585]: value = "task-1385051" [ 972.399227] env[62585]: _type = "Task" [ 972.399227] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.408679] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385051, 'name': CloneVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.777670] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fdd8b958-d6ad-47cb-96e5-5add37103cd2 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "a26fb190-e6e6-48ab-a1d6-c662421a965f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.531s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.801689] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385050, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519066} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.801689] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] b7b8338a-2e9f-4854-8f4d-ede21b150317/b7b8338a-2e9f-4854-8f4d-ede21b150317.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 972.801689] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 972.801689] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e0dd93b3-a450-4958-ba89-360ed7ef6864 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.811990] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 972.811990] env[62585]: value = "task-1385052" [ 972.811990] env[62585]: _type = "Task" [ 972.811990] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.822068] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385052, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.912030] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385051, 'name': CloneVM_Task} progress is 94%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.324295] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385052, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084628} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.328283] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 973.328283] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee09f496-e472-458c-8012-adb1544c15ab {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.352695] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] b7b8338a-2e9f-4854-8f4d-ede21b150317/b7b8338a-2e9f-4854-8f4d-ede21b150317.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 973.355461] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37c8d545-722e-4866-a327-f48ab4753c86 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.376297] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 973.376297] env[62585]: value = "task-1385053" [ 973.376297] env[62585]: _type = "Task" [ 973.376297] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.380735] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36518d1f-b7b5-48a8-9869-6cb987d108ce {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.388523] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385053, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.391128] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592be9a8-5126-4f9d-952a-afe64736883f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.425107] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3df480-c7ce-49ee-9335-f212843cb072 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.436545] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af59247-6256-4651-b684-2b4333a54ff0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.440594] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385051, 'name': CloneVM_Task} progress is 94%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.451292] env[62585]: DEBUG nova.compute.provider_tree [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.771587] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.771820] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.888111] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385053, 'name': ReconfigVM_Task, 'duration_secs': 0.29494} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.888727] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Reconfigured VM instance instance-00000060 to attach disk [datastore1] b7b8338a-2e9f-4854-8f4d-ede21b150317/b7b8338a-2e9f-4854-8f4d-ede21b150317.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.889016] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-589ce792-291f-4df1-a280-a01c8a07f667 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.897324] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 973.897324] env[62585]: value = "task-1385054" [ 973.897324] env[62585]: _type = "Task" [ 973.897324] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.906144] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385054, 'name': Rename_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.931454] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385051, 'name': CloneVM_Task} progress is 94%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.954846] env[62585]: DEBUG nova.scheduler.client.report [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 974.274079] env[62585]: DEBUG nova.compute.manager [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 974.408576] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385054, 'name': Rename_Task, 'duration_secs': 0.162703} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.409256] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 974.409256] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f9e0bc5-ed4c-4780-b046-5deb87b37313 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.417310] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 974.417310] env[62585]: value = "task-1385055" [ 974.417310] env[62585]: _type = "Task" [ 974.417310] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.429705] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385055, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.439422] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385051, 'name': CloneVM_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.460263] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.214s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.460946] env[62585]: DEBUG nova.compute.manager [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 974.464235] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.216s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.464700] env[62585]: DEBUG nova.objects.instance [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lazy-loading 'resources' on Instance uuid abf4a205-fcee-46e4-85b6-10a452cc0312 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 974.796091] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.932659] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385055, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.937591] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385051, 'name': CloneVM_Task, 'duration_secs': 2.105851} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.937591] env[62585]: INFO nova.virt.vmwareapi.vmops [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Created linked-clone VM from snapshot [ 974.937591] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07aa7f9b-7b24-4fef-b3b4-ae9bb36b92fd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.945422] env[62585]: DEBUG nova.virt.vmwareapi.images [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Uploading image 9e8956e5-150f-4d87-a95c-1cd92e9f51bf {{(pid=62585) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 974.958404] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Destroying the VM {{(pid=62585) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 974.958694] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9b6706eb-d896-43ba-9d84-726a8d0234ed {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.968035] env[62585]: DEBUG nova.compute.utils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 974.969463] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 974.969463] env[62585]: value = "task-1385056" [ 974.969463] env[62585]: _type = "Task" [ 974.969463] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.970723] env[62585]: DEBUG nova.compute.manager [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 974.970973] env[62585]: DEBUG nova.network.neutron [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 974.987281] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385056, 'name': Destroy_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.019149] env[62585]: DEBUG nova.policy [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '61f1c1d7e5e04485816dea7af8a8dae8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c03535f541de4bc8bae94238a9f34750', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 975.093953] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ac1369-cb62-49c4-8e39-5aaa86ef680b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.102514] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc23372-dd95-4942-800c-c1dfa97bb31c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.134313] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef51ff6-2b41-4fc3-8af8-5a1ef9dd5987 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.142613] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84953cf3-f6bb-4fdb-95f8-69f0bf359bd9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.156792] env[62585]: DEBUG nova.compute.provider_tree [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.371448] env[62585]: DEBUG nova.network.neutron [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Successfully created port: 8d5c80a7-e25e-414e-a45f-a43d747618bc {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 975.438083] env[62585]: DEBUG oslo_vmware.api [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385055, 'name': PowerOnVM_Task, 'duration_secs': 0.644286} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.438083] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 975.438083] env[62585]: INFO nova.compute.manager [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Took 9.15 seconds to spawn the instance on the hypervisor. [ 975.438083] env[62585]: DEBUG nova.compute.manager [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 975.438083] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea7d1e3-9653-4dfb-95af-2dd4f22a40b0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.473724] env[62585]: DEBUG nova.compute.manager [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 975.487771] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385056, 'name': Destroy_Task} progress is 33%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.660939] env[62585]: DEBUG nova.scheduler.client.report [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 975.953918] env[62585]: INFO nova.compute.manager [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Took 16.87 seconds to build instance. [ 975.990198] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385056, 'name': Destroy_Task, 'duration_secs': 0.610207} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.990694] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Destroyed the VM [ 975.991111] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Deleting Snapshot of the VM instance {{(pid=62585) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 975.991487] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fd1b2e8d-e1b3-4d48-9dd0-b1cb08a37056 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.001388] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 976.001388] env[62585]: value = "task-1385057" [ 976.001388] env[62585]: _type = "Task" [ 976.001388] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.011126] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385057, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.165668] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.702s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.168405] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.332s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.168652] env[62585]: DEBUG nova.objects.instance [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lazy-loading 'resources' on Instance uuid e4edc1dd-52ea-428e-832a-b49d3bc4fe14 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 976.195064] env[62585]: INFO nova.scheduler.client.report [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Deleted allocations for instance abf4a205-fcee-46e4-85b6-10a452cc0312 [ 976.455804] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4704f67e-7fc7-4e6e-b1d3-b419c28180fb tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "b7b8338a-2e9f-4854-8f4d-ede21b150317" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.385s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.485303] env[62585]: DEBUG nova.compute.manager [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 976.517237] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385057, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.519289] env[62585]: DEBUG nova.virt.hardware [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 976.519521] env[62585]: DEBUG nova.virt.hardware [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 976.519682] env[62585]: DEBUG nova.virt.hardware [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 976.519997] env[62585]: DEBUG nova.virt.hardware [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 976.519997] env[62585]: DEBUG nova.virt.hardware [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 976.520170] env[62585]: DEBUG nova.virt.hardware [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 976.520377] env[62585]: DEBUG nova.virt.hardware [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 976.520536] env[62585]: DEBUG nova.virt.hardware [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 976.520703] env[62585]: DEBUG nova.virt.hardware [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 976.521044] env[62585]: DEBUG nova.virt.hardware [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 976.521153] env[62585]: DEBUG nova.virt.hardware [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 976.522259] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0553ae04-2360-4d19-afa5-b640dcf78e33 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.531599] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb658435-0cf0-42ee-b936-87456cb3266e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.709791] env[62585]: DEBUG oslo_concurrency.lockutils [None req-7b621fab-0875-4a7f-b582-6679fc7080c5 tempest-ServersNegativeTestJSON-310901189 tempest-ServersNegativeTestJSON-310901189-project-member] Lock "abf4a205-fcee-46e4-85b6-10a452cc0312" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.409s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.813073] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4a0cce-ba99-48d0-a012-46974da75944 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.823127] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c32318-ebf1-4ff8-83be-4f08b79757dd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.858607] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a5a420-8ee2-4cad-86ad-4f9b7942ea93 
{{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.866875] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9ab2bb-d4c8-439d-8af4-3f604f8f1770 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.882353] env[62585]: DEBUG nova.compute.provider_tree [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.012053] env[62585]: DEBUG oslo_vmware.api [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385057, 'name': RemoveSnapshot_Task, 'duration_secs': 0.694797} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.012469] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Deleted Snapshot of the VM instance {{(pid=62585) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 977.192416] env[62585]: DEBUG nova.compute.manager [req-c017206d-4d28-4037-963c-cf731f22a08f req-1c350e53-1e8b-47fd-a751-7466269f06b6 service nova] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Received event network-vif-plugged-8d5c80a7-e25e-414e-a45f-a43d747618bc {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 977.192416] env[62585]: DEBUG oslo_concurrency.lockutils [req-c017206d-4d28-4037-963c-cf731f22a08f req-1c350e53-1e8b-47fd-a751-7466269f06b6 service nova] Acquiring lock "8d84e240-2dc3-4680-9ee7-b705d4e7749a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.192416] env[62585]: DEBUG oslo_concurrency.lockutils [req-c017206d-4d28-4037-963c-cf731f22a08f req-1c350e53-1e8b-47fd-a751-7466269f06b6 service nova] Lock "8d84e240-2dc3-4680-9ee7-b705d4e7749a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.192416] env[62585]: DEBUG oslo_concurrency.lockutils [req-c017206d-4d28-4037-963c-cf731f22a08f req-1c350e53-1e8b-47fd-a751-7466269f06b6 service nova] Lock "8d84e240-2dc3-4680-9ee7-b705d4e7749a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.192416] env[62585]: DEBUG nova.compute.manager [req-c017206d-4d28-4037-963c-cf731f22a08f req-1c350e53-1e8b-47fd-a751-7466269f06b6 service nova] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] No waiting events found dispatching network-vif-plugged-8d5c80a7-e25e-414e-a45f-a43d747618bc {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 977.192416] env[62585]: WARNING nova.compute.manager [req-c017206d-4d28-4037-963c-cf731f22a08f 
req-1c350e53-1e8b-47fd-a751-7466269f06b6 service nova] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Received unexpected event network-vif-plugged-8d5c80a7-e25e-414e-a45f-a43d747618bc for instance with vm_state building and task_state spawning. [ 977.304577] env[62585]: INFO nova.compute.manager [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Rescuing [ 977.304948] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "refresh_cache-b7b8338a-2e9f-4854-8f4d-ede21b150317" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.305119] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquired lock "refresh_cache-b7b8338a-2e9f-4854-8f4d-ede21b150317" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.305552] env[62585]: DEBUG nova.network.neutron [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 977.371242] env[62585]: DEBUG nova.network.neutron [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Successfully updated port: 8d5c80a7-e25e-414e-a45f-a43d747618bc {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 977.388018] env[62585]: DEBUG nova.scheduler.client.report [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 977.521331] env[62585]: WARNING nova.compute.manager [None req-5b6bb4e1-311e-456d-aa65-a6ca84c159d2 tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Image not found during snapshot: nova.exception.ImageNotFound: Image 9e8956e5-150f-4d87-a95c-1cd92e9f51bf could not be found. 
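[editor's illustrative note] The entries above repeatedly show two library patterns: oslo.vmware's task handling (an "Invoking <Something>_Task" request, then "Waiting for the task", periodic "progress is N%" polls from _poll_task, and a final "completed successfully"), and oslo.concurrency's lock accounting ("Acquiring lock X by Y", "acquired ... waited Ns", "released ... held Ns"). The short Python sketch below is illustrative only and is not nova's implementation: it assumes an already-created oslo_vmware.api.VMwareAPISession (`session`), a datacenter managed-object reference (`dc_ref`), and a datastore disk path (`disk_path`); the function names, lock name, and body are hypothetical.

# Illustrative sketch only -- not nova's code. Shows the call shape behind the
# "Invoking VirtualDiskManager.ExtendVirtualDisk_Task" / "progress is N%" /
# "completed successfully" lines and the lockutils Acquiring/acquired/released lines.
from oslo_concurrency import lockutils


def extend_virtual_disk(session, disk_path, dc_ref, new_size_kb):
    # invoke_api() issues the SOAP request; the returned task moref is then
    # polled by wait_for_task(), which emits the progress/completion log lines.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                              name=disk_path, datacenter=dc_ref,
                              newCapacityKb=new_size_kb, eagerZero=False)
    return session.wait_for_task(task)


# The Acquiring/acquired/released triplets come from lockutils; a function
# decorated like this produces that accounting around its body.
@lockutils.synchronized('compute_resources')
def update_usage(tracker, instance):
    tracker.update(instance)  # hypothetical body; the lock name mirrors the log

[end of editor's note; log continues]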
[ 977.874421] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Acquiring lock "refresh_cache-8d84e240-2dc3-4680-9ee7-b705d4e7749a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.874747] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Acquired lock "refresh_cache-8d84e240-2dc3-4680-9ee7-b705d4e7749a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.876100] env[62585]: DEBUG nova.network.neutron [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 977.894430] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.723s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.895112] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.720s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.895208] env[62585]: DEBUG nova.objects.instance [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Lazy-loading 'resources' on Instance uuid 65ed4088-2cc5-4c00-94af-f714ec608fd8 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.916243] env[62585]: INFO nova.scheduler.client.report [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleted allocations for instance e4edc1dd-52ea-428e-832a-b49d3bc4fe14 [ 978.102072] env[62585]: DEBUG nova.network.neutron [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Updating instance_info_cache with network_info: [{"id": "d006570b-3bb8-443a-8eb9-f4d5dcc7c366", "address": "fa:16:3e:7a:5c:1a", "network": {"id": "bd25be93-26eb-4d34-b141-5264c9d0539a", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1411210261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ca61db1ff3fb4f5cae3dc18e70af8ba7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86b8f7fc-c105-4bcb-a4ec-c363ed38b17a", "external-id": "nsx-vlan-transportzone-830", "segmentation_id": 830, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd006570b-3b", "ovs_interfaceid": "d006570b-3bb8-443a-8eb9-f4d5dcc7c366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.432826] env[62585]: DEBUG oslo_concurrency.lockutils [None req-5fdb5dab-e8cd-423a-b735-cad96409cd99 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "e4edc1dd-52ea-428e-832a-b49d3bc4fe14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.562s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.434793] env[62585]: DEBUG nova.network.neutron [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 978.507028] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a066e4-9c11-4ef3-9ca2-c5f3f239d0a4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.517047] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6a3a3e-fa3e-4fd6-873c-051318b1f9f4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.557824] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-489e794c-870b-4dae-ac28-83474cc1eb6d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.567066] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1c6f4e-3792-4c05-be70-af50629b3b1a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.582692] env[62585]: DEBUG nova.compute.provider_tree [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.604243] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Releasing lock "refresh_cache-b7b8338a-2e9f-4854-8f4d-ede21b150317" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.695914] env[62585]: DEBUG nova.network.neutron [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Updating instance_info_cache with network_info: [{"id": 
"8d5c80a7-e25e-414e-a45f-a43d747618bc", "address": "fa:16:3e:3f:04:4b", "network": {"id": "cbc85ee9-6069-447d-acd0-f908ce1a4131", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1556321924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c03535f541de4bc8bae94238a9f34750", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d5c80a7-e2", "ovs_interfaceid": "8d5c80a7-e25e-414e-a45f-a43d747618bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.779331] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "b0885bdd-bc8d-4311-8388-54bdc22144c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.779605] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "b0885bdd-bc8d-4311-8388-54bdc22144c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.779820] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "b0885bdd-bc8d-4311-8388-54bdc22144c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.780018] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "b0885bdd-bc8d-4311-8388-54bdc22144c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.780195] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "b0885bdd-bc8d-4311-8388-54bdc22144c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.783242] env[62585]: INFO nova.compute.manager 
[None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Terminating instance [ 978.784876] env[62585]: DEBUG nova.compute.manager [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 978.785102] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 978.786047] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed90f2a1-623e-444c-a44e-232f804a40be {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.795716] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 978.795992] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1914818f-f809-40a6-9f94-7f612ab83bca {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.805671] env[62585]: DEBUG oslo_vmware.api [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 978.805671] env[62585]: value = "task-1385058" [ 978.805671] env[62585]: _type = "Task" [ 978.805671] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.822662] env[62585]: DEBUG oslo_vmware.api [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385058, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.087319] env[62585]: DEBUG nova.scheduler.client.report [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 979.151139] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 979.151446] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8931bac8-fb7b-4ecd-8104-c803864c2652 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.159695] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 979.159695] env[62585]: value = "task-1385059" [ 979.159695] env[62585]: _type = "Task" [ 979.159695] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.170363] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385059, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.198714] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Releasing lock "refresh_cache-8d84e240-2dc3-4680-9ee7-b705d4e7749a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.199258] env[62585]: DEBUG nova.compute.manager [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Instance network_info: |[{"id": "8d5c80a7-e25e-414e-a45f-a43d747618bc", "address": "fa:16:3e:3f:04:4b", "network": {"id": "cbc85ee9-6069-447d-acd0-f908ce1a4131", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1556321924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c03535f541de4bc8bae94238a9f34750", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d5c80a7-e2", "ovs_interfaceid": "8d5c80a7-e25e-414e-a45f-a43d747618bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 979.199701] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:04:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4d3f69a-b086-4c3b-b976-5a848b63dfc4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d5c80a7-e25e-414e-a45f-a43d747618bc', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 979.207299] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Creating folder: Project (c03535f541de4bc8bae94238a9f34750). Parent ref: group-v293962. 
{{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 979.207584] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35e2ddca-8eb2-40ba-b347-f9d67f6ceebf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.221819] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Created folder: Project (c03535f541de4bc8bae94238a9f34750) in parent group-v293962. [ 979.222076] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Creating folder: Instances. Parent ref: group-v294064. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 979.222334] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c14e3533-e1d1-49c4-b284-b63f1b9b2917 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.234563] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Created folder: Instances in parent group-v294064. [ 979.234889] env[62585]: DEBUG oslo.service.loopingcall [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 979.235131] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 979.235358] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e73dfb3-345c-4fcd-b97b-c61d8ceb4267 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.257124] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 979.257124] env[62585]: value = "task-1385062" [ 979.257124] env[62585]: _type = "Task" [ 979.257124] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.270493] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385062, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.273262] env[62585]: DEBUG nova.compute.manager [req-a443d90c-6742-4d31-bb61-7d62b69faeb9 req-3c9f8003-11b1-47ce-a6e0-a63643d1218c service nova] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Received event network-changed-8d5c80a7-e25e-414e-a45f-a43d747618bc {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 979.273457] env[62585]: DEBUG nova.compute.manager [req-a443d90c-6742-4d31-bb61-7d62b69faeb9 req-3c9f8003-11b1-47ce-a6e0-a63643d1218c service nova] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Refreshing instance network info cache due to event network-changed-8d5c80a7-e25e-414e-a45f-a43d747618bc. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 979.273679] env[62585]: DEBUG oslo_concurrency.lockutils [req-a443d90c-6742-4d31-bb61-7d62b69faeb9 req-3c9f8003-11b1-47ce-a6e0-a63643d1218c service nova] Acquiring lock "refresh_cache-8d84e240-2dc3-4680-9ee7-b705d4e7749a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.273823] env[62585]: DEBUG oslo_concurrency.lockutils [req-a443d90c-6742-4d31-bb61-7d62b69faeb9 req-3c9f8003-11b1-47ce-a6e0-a63643d1218c service nova] Acquired lock "refresh_cache-8d84e240-2dc3-4680-9ee7-b705d4e7749a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.274091] env[62585]: DEBUG nova.network.neutron [req-a443d90c-6742-4d31-bb61-7d62b69faeb9 req-3c9f8003-11b1-47ce-a6e0-a63643d1218c service nova] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Refreshing network info cache for port 8d5c80a7-e25e-414e-a45f-a43d747618bc {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 979.317558] env[62585]: DEBUG oslo_vmware.api [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385058, 'name': PowerOffVM_Task, 'duration_secs': 0.209681} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.317809] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 979.317970] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 979.318255] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54511553-3177-4d70-a77f-8f57fb014f26 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.384677] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 979.384884] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 979.384941] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Deleting the datastore file [datastore2] b0885bdd-bc8d-4311-8388-54bdc22144c2 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 979.385695] env[62585]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9c79fbe-d287-4d4b-b8ab-ca2ae4b22728 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.395615] env[62585]: DEBUG oslo_vmware.api [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for the task: (returnval){ [ 979.395615] env[62585]: value = "task-1385064" [ 979.395615] env[62585]: _type = "Task" [ 979.395615] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.415701] env[62585]: DEBUG oslo_vmware.api [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385064, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.591847] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.697s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.594394] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.798s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.595680] env[62585]: INFO nova.compute.claims [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 979.609135] env[62585]: INFO nova.scheduler.client.report [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 tempest-ServersAaction247Test-107675771-project-member] Deleted allocations for instance 65ed4088-2cc5-4c00-94af-f714ec608fd8 [ 979.674386] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385059, 'name': PowerOffVM_Task, 'duration_secs': 0.236666} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.674662] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 979.675462] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce4b9c20-95f2-4b6b-b533-47148e87aeef {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.696830] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d173b9f6-ba46-496e-9cbc-1da4200770bf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.702107] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "474d033c-5bf2-4b6a-95be-f865e8f5dfc9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.702330] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "474d033c-5bf2-4b6a-95be-f865e8f5dfc9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.732162] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 979.732699] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ef186aa-33ab-4880-a488-656ad9cc0315 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.741044] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 979.741044] env[62585]: value = "task-1385065" [ 979.741044] env[62585]: _type = "Task" [ 979.741044] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.749811] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385065, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.767022] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385062, 'name': CreateVM_Task, 'duration_secs': 0.347139} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.767022] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 979.767199] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.767366] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.767689] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 979.767929] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c28250c4-2d3e-4058-a15e-5ac6b6ba8971 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.773907] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Waiting for the task: (returnval){ [ 979.773907] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a6c008-85c1-523c-4dea-bafb7ea256a6" [ 979.773907] env[62585]: _type = "Task" [ 979.773907] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.786066] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a6c008-85c1-523c-4dea-bafb7ea256a6, 'name': SearchDatastore_Task, 'duration_secs': 0.009952} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.786364] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.786631] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 979.786903] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.787074] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.787264] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 979.787533] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d5f3247-74d1-4fe2-a450-4912a157cd44 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.797667] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 979.797849] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 979.798557] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d12e4e04-9e4c-46c3-8996-83516f5fcc66 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.804245] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Waiting for the task: (returnval){ [ 979.804245] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5262dda7-da26-56a0-323c-efdebdfecea3" [ 979.804245] env[62585]: _type = "Task" [ 979.804245] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.813836] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5262dda7-da26-56a0-323c-efdebdfecea3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.906997] env[62585]: DEBUG oslo_vmware.api [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Task: {'id': task-1385064, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149363} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.907720] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 979.907720] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 979.907834] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 979.908037] env[62585]: INFO nova.compute.manager [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Took 1.12 seconds to destroy the instance on the hypervisor. [ 979.908307] env[62585]: DEBUG oslo.service.loopingcall [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 979.908510] env[62585]: DEBUG nova.compute.manager [-] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 979.908621] env[62585]: DEBUG nova.network.neutron [-] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 979.967105] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.967357] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.050196] env[62585]: DEBUG nova.network.neutron [req-a443d90c-6742-4d31-bb61-7d62b69faeb9 req-3c9f8003-11b1-47ce-a6e0-a63643d1218c service nova] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Updated VIF entry in instance network info cache for port 8d5c80a7-e25e-414e-a45f-a43d747618bc. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 980.050567] env[62585]: DEBUG nova.network.neutron [req-a443d90c-6742-4d31-bb61-7d62b69faeb9 req-3c9f8003-11b1-47ce-a6e0-a63643d1218c service nova] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Updating instance_info_cache with network_info: [{"id": "8d5c80a7-e25e-414e-a45f-a43d747618bc", "address": "fa:16:3e:3f:04:4b", "network": {"id": "cbc85ee9-6069-447d-acd0-f908ce1a4131", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1556321924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c03535f541de4bc8bae94238a9f34750", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d5c80a7-e2", "ovs_interfaceid": "8d5c80a7-e25e-414e-a45f-a43d747618bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.117330] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c2b58d3f-9f36-42eb-9c3b-696317a576cc tempest-ServersAaction247Test-107675771 
tempest-ServersAaction247Test-107675771-project-member] Lock "65ed4088-2cc5-4c00-94af-f714ec608fd8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.711s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.204811] env[62585]: DEBUG nova.compute.manager [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 980.252633] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] VM already powered off {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 980.252858] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 980.253094] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.318569] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5262dda7-da26-56a0-323c-efdebdfecea3, 'name': SearchDatastore_Task, 'duration_secs': 0.008715} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.319464] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32cc03d9-b48d-48ff-8ae3-5fef599f66aa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.326065] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Waiting for the task: (returnval){ [ 980.326065] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52bab13a-3d27-c3ab-6b32-b1d8f22fe147" [ 980.326065] env[62585]: _type = "Task" [ 980.326065] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.335720] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52bab13a-3d27-c3ab-6b32-b1d8f22fe147, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.470384] env[62585]: DEBUG nova.compute.manager [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 980.553018] env[62585]: DEBUG oslo_concurrency.lockutils [req-a443d90c-6742-4d31-bb61-7d62b69faeb9 req-3c9f8003-11b1-47ce-a6e0-a63643d1218c service nova] Releasing lock "refresh_cache-8d84e240-2dc3-4680-9ee7-b705d4e7749a" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.688124] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a978558-351f-4035-8d39-285c31c1df17 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.696502] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8a562c-25e6-4dbf-a48c-062cdb7f4581 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.729706] env[62585]: DEBUG nova.network.neutron [-] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.732026] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd8f158-9013-428b-a3cb-9fa0fb58a8e1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.742873] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f2f6da0-36ab-442c-bca5-83fbd6c411a0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.747627] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.759063] env[62585]: DEBUG nova.compute.provider_tree [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.836913] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52bab13a-3d27-c3ab-6b32-b1d8f22fe147, 'name': SearchDatastore_Task, 'duration_secs': 0.013914} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.837197] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.837458] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 8d84e240-2dc3-4680-9ee7-b705d4e7749a/8d84e240-2dc3-4680-9ee7-b705d4e7749a.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 980.837733] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.837924] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 980.838154] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6285b006-d40f-4a75-9a7d-2c56bcac50fc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.840047] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d199f6b-e9d0-4a34-b752-996d5e8d143f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.848766] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Waiting for the task: (returnval){ [ 980.848766] env[62585]: value = "task-1385066" [ 980.848766] env[62585]: _type = "Task" [ 980.848766] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.852796] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 980.852982] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 980.854048] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7325d713-0efe-4489-9e41-2164a40d4287 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.861181] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': task-1385066, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.865089] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 980.865089] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]520284b7-522b-b316-51a6-1e061ece99bb" [ 980.865089] env[62585]: _type = "Task" [ 980.865089] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.873283] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]520284b7-522b-b316-51a6-1e061ece99bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.992014] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.235762] env[62585]: INFO nova.compute.manager [-] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Took 1.33 seconds to deallocate network for instance. 
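The records above all follow the same driver pattern: an oslo.vmware call (PowerOffVM_Task, CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task, ...) is invoked, a Task handle comes back, and wait_for_task polls it ("progress is 0%" ... "completed successfully") before the next step of the spawn or terminate flow proceeds. The sketch below reproduces that invoke-then-poll loop in plain Python as an aid to reading the log; it is a minimal approximation, not oslo.vmware's actual wait_for_task implementation, and the TaskClient protocol, the task_info() dictionary shape, the poll_interval default and the TaskFailed exception are assumptions introduced here purely for illustration.

    # Minimal sketch of the invoke-then-poll pattern visible in the log
    # (PowerOffVM_Task, CreateVM_Task, CopyVirtualDisk_Task, ...).
    # NOT oslo.vmware's real code; TaskClient/TaskFailed are hypothetical.
    import time
    from typing import Protocol


    class TaskClient(Protocol):
        """Hypothetical stand-in for the vCenter session seen in the log."""

        def invoke_task(self, method: str, **kwargs) -> str:
            """Start a server-side task (e.g. 'PowerOffVM_Task'); return its id."""

        def task_info(self, task_id: str) -> dict:
            """Return {'state': 'running'|'success'|'error', 'progress': int, ...}."""


    class TaskFailed(RuntimeError):
        pass


    def wait_for_task(client: TaskClient, method: str,
                      poll_interval: float = 0.5, **kwargs) -> dict:
        # Mirrors the log sequence: "Invoking <method>" -> "progress is N%"
        # -> "completed successfully" (or raise if the task errors out).
        task_id = client.invoke_task(method, **kwargs)
        while True:
            info = client.task_info(task_id)
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise TaskFailed(f"{method} ({task_id}) failed: {info.get('error')}")
            time.sleep(poll_interval)

Read against this sketch, the PowerOffVM_Task/CreateVM_Task entries correspond to successive wait_for_task calls made while the compute manager holds or waits on the locks logged around them; the "Acquiring"/"Acquired"/"released" lines come from oslo_concurrency.lockutils (whose lockutils.lock(name) context manager is a real API), whereas the client object and method names in the sketch remain illustrative.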
[ 981.261843] env[62585]: DEBUG nova.scheduler.client.report [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 981.304418] env[62585]: DEBUG nova.compute.manager [req-2685ad5c-ab2f-4376-afa0-96026b667ea9 req-606bf3db-3b50-4d63-abab-352f9f544208 service nova] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Received event network-vif-deleted-68e2a061-2351-4cbb-bc96-6898b8dac94e {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 981.358821] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': task-1385066, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478065} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.359090] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 8d84e240-2dc3-4680-9ee7-b705d4e7749a/8d84e240-2dc3-4680-9ee7-b705d4e7749a.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 981.359312] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 981.359554] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0da35a1b-e734-4197-99af-064c0ae70c9c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.366275] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Waiting for the task: (returnval){ [ 981.366275] env[62585]: value = "task-1385067" [ 981.366275] env[62585]: _type = "Task" [ 981.366275] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.379282] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]520284b7-522b-b316-51a6-1e061ece99bb, 'name': SearchDatastore_Task, 'duration_secs': 0.014102} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.382285] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': task-1385067, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.382552] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55318175-cac1-4c11-9e9c-576c0ac4fe78 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.387620] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 981.387620] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]529db3f9-3163-663d-153b-f27449c2d4ff" [ 981.387620] env[62585]: _type = "Task" [ 981.387620] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.395443] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]529db3f9-3163-663d-153b-f27449c2d4ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.685365] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquiring lock "6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.685616] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Lock "6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.742731] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.768405] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.174s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.768920] env[62585]: DEBUG nova.compute.manager [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e 
tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 981.771544] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.024s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.772866] env[62585]: INFO nova.compute.claims [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 981.879053] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': task-1385067, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072665} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.879333] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 981.880105] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb1a24f-3f70-43fc-ba6b-8a2ea2372896 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.901361] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 8d84e240-2dc3-4680-9ee7-b705d4e7749a/8d84e240-2dc3-4680-9ee7-b705d4e7749a.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 981.904305] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-982cafd7-d796-4a14-b7f2-33fd1bd0cd82 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.924862] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]529db3f9-3163-663d-153b-f27449c2d4ff, 'name': SearchDatastore_Task, 'duration_secs': 0.009123} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.926022] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.926295] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] b7b8338a-2e9f-4854-8f4d-ede21b150317/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk. {{(pid=62585) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 981.926604] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Waiting for the task: (returnval){ [ 981.926604] env[62585]: value = "task-1385068" [ 981.926604] env[62585]: _type = "Task" [ 981.926604] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.926834] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d59f6f2-657f-4c7b-86f1-dd6489b53305 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.938073] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': task-1385068, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.939338] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 981.939338] env[62585]: value = "task-1385069" [ 981.939338] env[62585]: _type = "Task" [ 981.939338] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.947938] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385069, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.188636] env[62585]: DEBUG nova.compute.manager [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 982.280858] env[62585]: DEBUG nova.compute.utils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 982.283455] env[62585]: DEBUG nova.compute.manager [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 982.283680] env[62585]: DEBUG nova.network.neutron [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 982.355473] env[62585]: DEBUG nova.policy [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28834cc42f8a49cebca5647badabf8ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c49ab537d42244f495aaa3cbdaafc6b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 982.438538] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': task-1385068, 'name': ReconfigVM_Task, 'duration_secs': 0.367511} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.438819] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 8d84e240-2dc3-4680-9ee7-b705d4e7749a/8d84e240-2dc3-4680-9ee7-b705d4e7749a.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 982.439490] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6e0d6b72-5a7e-412e-939c-3a9b27c7237b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.448957] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385069, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.437849} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.450153] env[62585]: INFO nova.virt.vmwareapi.ds_util [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] b7b8338a-2e9f-4854-8f4d-ede21b150317/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk. [ 982.450480] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Waiting for the task: (returnval){ [ 982.450480] env[62585]: value = "task-1385070" [ 982.450480] env[62585]: _type = "Task" [ 982.450480] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.451266] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa30d21a-9ac0-4609-be9c-77650fd0f28e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.478372] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] b7b8338a-2e9f-4854-8f4d-ede21b150317/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 982.481834] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44b66a02-195c-4cbd-84e6-fe4cf0f24af1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.494691] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': task-1385070, 'name': Rename_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.502028] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 982.502028] env[62585]: value = "task-1385071" [ 982.502028] env[62585]: _type = "Task" [ 982.502028] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.511102] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385071, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.617653] env[62585]: DEBUG nova.network.neutron [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Successfully created port: 2eb59df2-5648-46be-995c-88785a05be2a {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 982.714240] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.783610] env[62585]: DEBUG nova.compute.manager [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 982.892411] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a70b49-b904-4a50-9466-577e94fd78d0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.900350] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dfa8bb8-8e3b-4f55-87d3-ec5e6aa8e6bc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.932433] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a81ac0-3459-4328-96cb-aedf399b4682 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.941751] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f232b0-d963-4090-88bc-bb8561515d95 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.958074] env[62585]: DEBUG nova.compute.provider_tree [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.966633] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': task-1385070, 'name': Rename_Task, 'duration_secs': 0.172915} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.967478] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 982.967715] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35a920e9-bfa7-4b92-a0f9-a1175f58c844 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.975218] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Waiting for the task: (returnval){ [ 982.975218] env[62585]: value = "task-1385072" [ 982.975218] env[62585]: _type = "Task" [ 982.975218] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.984652] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': task-1385072, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.018641] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385071, 'name': ReconfigVM_Task, 'duration_secs': 0.317445} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.018950] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Reconfigured VM instance instance-00000060 to attach disk [datastore1] b7b8338a-2e9f-4854-8f4d-ede21b150317/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 983.019925] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606ef351-45d8-462d-a5a6-4378c196ddeb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.045588] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c9f8a47-7d5c-4f07-913d-a1aff6d2b2a3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.062204] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 983.062204] env[62585]: value = "task-1385073" [ 983.062204] env[62585]: _type = "Task" [ 983.062204] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.071754] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385073, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.462517] env[62585]: DEBUG nova.scheduler.client.report [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 983.486567] env[62585]: DEBUG oslo_vmware.api [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': task-1385072, 'name': PowerOnVM_Task, 'duration_secs': 0.474163} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.486840] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 983.487053] env[62585]: INFO nova.compute.manager [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Took 7.00 seconds to spawn the instance on the hypervisor. [ 983.487238] env[62585]: DEBUG nova.compute.manager [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 983.487980] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ef0b8e-d994-42e8-bcaf-5800672a0a61 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.572249] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385073, 'name': ReconfigVM_Task, 'duration_secs': 0.211079} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.572571] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 983.572819] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a39cdc00-0c8a-4787-8d20-89c3b0ff81ab {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.579381] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 983.579381] env[62585]: value = "task-1385074" [ 983.579381] env[62585]: _type = "Task" [ 983.579381] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.587176] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385074, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.795726] env[62585]: DEBUG nova.compute.manager [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 983.821224] env[62585]: DEBUG nova.virt.hardware [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 983.821521] env[62585]: DEBUG nova.virt.hardware [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 983.821703] env[62585]: DEBUG nova.virt.hardware [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 983.821893] env[62585]: DEBUG nova.virt.hardware [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 983.822063] env[62585]: DEBUG nova.virt.hardware [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 983.822248] env[62585]: DEBUG nova.virt.hardware [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 983.822481] env[62585]: DEBUG nova.virt.hardware [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 983.822650] env[62585]: DEBUG nova.virt.hardware [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 983.822819] env[62585]: DEBUG nova.virt.hardware [None 
req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 983.822982] env[62585]: DEBUG nova.virt.hardware [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 983.823192] env[62585]: DEBUG nova.virt.hardware [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 983.824051] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f15aa65-0191-4aa6-9da2-354c875b274b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.832662] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d6fd78-ca6a-4272-b2ff-39029352fdc1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.967753] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.196s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.968322] env[62585]: DEBUG nova.compute.manager [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 983.971111] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.979s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.972872] env[62585]: INFO nova.compute.claims [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 984.008031] env[62585]: INFO nova.compute.manager [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Took 13.88 seconds to build instance. 
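The recurring "Waiting for the task ... to complete" and "_poll_task ... progress is N%" records above are produced by oslo.vmware's task-polling loop (VMwareAPISession.wait_for_task), which every ExtendVirtualDisk/CopyVirtualDisk/ReconfigVM/Rename/PowerOnVM task in this trace passes through. A minimal standalone sketch of that pattern follows; the vCenter host, credentials, poll interval, and the choice of target VM are illustrative placeholders, not values taken from this log.

    # Sketch of the oslo.vmware task-polling pattern behind the
    # "Waiting for the task ... to complete" / "progress is N%" records.
    # Host, credentials and the chosen VM are placeholders for illustration.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Positional args: host, username, password, api_retry_count, task_poll_interval.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret', 10, 0.5)

    # Pick an arbitrary VM managed-object reference and start a power-on task.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 100)
    vm_ref = result.objects[0].obj
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task state until it succeeds or raises,
    # emitting the periodic progress records seen throughout this section.
    task_info = session.wait_for_task(task)
    print(task_info.state)

Nova's vmwareapi helpers drive the same wait loop, which is why each task invocation in the trace is followed by one or more progress polls and a final "completed successfully" record carrying the task's duration_secs.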
[ 984.091247] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385074, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.108957] env[62585]: DEBUG nova.compute.manager [req-9ab886c1-09a1-4dd5-a8f2-f0a02b3500e6 req-a212ace3-8ce1-4e06-ba94-eae0230af54c service nova] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Received event network-vif-plugged-2eb59df2-5648-46be-995c-88785a05be2a {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 984.109208] env[62585]: DEBUG oslo_concurrency.lockutils [req-9ab886c1-09a1-4dd5-a8f2-f0a02b3500e6 req-a212ace3-8ce1-4e06-ba94-eae0230af54c service nova] Acquiring lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.109485] env[62585]: DEBUG oslo_concurrency.lockutils [req-9ab886c1-09a1-4dd5-a8f2-f0a02b3500e6 req-a212ace3-8ce1-4e06-ba94-eae0230af54c service nova] Lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.109691] env[62585]: DEBUG oslo_concurrency.lockutils [req-9ab886c1-09a1-4dd5-a8f2-f0a02b3500e6 req-a212ace3-8ce1-4e06-ba94-eae0230af54c service nova] Lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.109888] env[62585]: DEBUG nova.compute.manager [req-9ab886c1-09a1-4dd5-a8f2-f0a02b3500e6 req-a212ace3-8ce1-4e06-ba94-eae0230af54c service nova] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] No waiting events found dispatching network-vif-plugged-2eb59df2-5648-46be-995c-88785a05be2a {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 984.110090] env[62585]: WARNING nova.compute.manager [req-9ab886c1-09a1-4dd5-a8f2-f0a02b3500e6 req-a212ace3-8ce1-4e06-ba94-eae0230af54c service nova] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Received unexpected event network-vif-plugged-2eb59df2-5648-46be-995c-88785a05be2a for instance with vm_state building and task_state spawning. 
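Similarly, the "Acquiring lock ...", "acquired ... waited Ns" and '"released" ... held Ns' records throughout this section come from oslo.concurrency's lockutils wrappers. Below is a minimal sketch of that locking pattern, assuming nothing beyond the public lockutils API; the lock names and the decorated function are illustrative, not Nova code.

    # Sketch of the oslo.concurrency locking pattern that produces the
    # "Acquiring lock ... / acquired ... waited / released ... held" records.
    # Lock names and the decorated function are illustrative only.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs while holding the in-process 'compute_resources' lock; the
        # decorator's wrapper logs how long the caller waited for the lock
        # and how long it was held.
        pass

    # The same API also offers a context-manager form, which logs the
    # "Acquiring lock ... / Acquired lock ... / Releasing lock ..." lines
    # seen for the refresh_cache and image-cache locks in this trace.
    with lockutils.lock('refresh_cache-example'):
        pass

    update_usage()

Both forms serialize callers on the lock name alone, which is why different request IDs in this trace queue on "compute_resources" and report multi-second wait and hold times during concurrent instance claims.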
[ 984.230509] env[62585]: DEBUG nova.network.neutron [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Successfully updated port: 2eb59df2-5648-46be-995c-88785a05be2a {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 984.477271] env[62585]: DEBUG nova.compute.utils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 984.481019] env[62585]: DEBUG nova.compute.manager [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 984.481323] env[62585]: DEBUG nova.network.neutron [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 984.508277] env[62585]: DEBUG oslo_concurrency.lockutils [None req-4387c477-2de7-4075-8e50-c9e2d0b4cd7b tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Lock "8d84e240-2dc3-4680-9ee7-b705d4e7749a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.390s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.521088] env[62585]: DEBUG nova.policy [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac7d82c678d64fba8373930238d5bb2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8121e0a00494834a580b940d36e0160', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 984.594067] env[62585]: DEBUG oslo_vmware.api [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385074, 'name': PowerOnVM_Task, 'duration_secs': 0.760699} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.594431] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 984.597956] env[62585]: DEBUG nova.compute.manager [None req-9a9bf504-afe1-4ec3-b029-66e4a5902242 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 984.599074] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733c64aa-e5d9-49df-b960-3ef659b47d38 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.640562] env[62585]: DEBUG oslo_concurrency.lockutils [None req-adb4d795-1b18-4219-ba74-4f8029d1222a tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Acquiring lock "interface-8d84e240-2dc3-4680-9ee7-b705d4e7749a-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.640937] env[62585]: DEBUG oslo_concurrency.lockutils [None req-adb4d795-1b18-4219-ba74-4f8029d1222a tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Lock "interface-8d84e240-2dc3-4680-9ee7-b705d4e7749a-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.641379] env[62585]: DEBUG nova.objects.instance [None req-adb4d795-1b18-4219-ba74-4f8029d1222a tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Lazy-loading 'flavor' on Instance uuid 8d84e240-2dc3-4680-9ee7-b705d4e7749a {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 984.734715] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "refresh_cache-0d256aa0-a873-4ff1-8c43-464d8b2d03a8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.734715] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "refresh_cache-0d256aa0-a873-4ff1-8c43-464d8b2d03a8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.734715] env[62585]: DEBUG nova.network.neutron [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 984.943449] env[62585]: DEBUG nova.network.neutron [None 
req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Successfully created port: 6f3ba893-9718-4923-9cfb-b44924398357 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 984.982493] env[62585]: DEBUG nova.compute.manager [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 985.105967] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ed4456-4ca5-457e-96c5-c34b58e5f818 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.118268] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea90bf2-ba47-4693-a778-16a9e740692e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.149549] env[62585]: DEBUG nova.objects.instance [None req-adb4d795-1b18-4219-ba74-4f8029d1222a tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Lazy-loading 'pci_requests' on Instance uuid 8d84e240-2dc3-4680-9ee7-b705d4e7749a {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.151884] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10824e9-30d0-457f-b3a9-3085071386d6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.160740] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0168ff0e-d068-465d-8158-ef5b385c7bc7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.175640] env[62585]: DEBUG nova.compute.provider_tree [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.288761] env[62585]: DEBUG nova.network.neutron [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 985.453582] env[62585]: DEBUG nova.network.neutron [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updating instance_info_cache with network_info: [{"id": "2eb59df2-5648-46be-995c-88785a05be2a", "address": "fa:16:3e:6a:e4:c5", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2eb59df2-56", "ovs_interfaceid": "2eb59df2-5648-46be-995c-88785a05be2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.655736] env[62585]: DEBUG nova.objects.base [None req-adb4d795-1b18-4219-ba74-4f8029d1222a tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Object Instance<8d84e240-2dc3-4680-9ee7-b705d4e7749a> lazy-loaded attributes: flavor,pci_requests {{(pid=62585) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 985.656072] env[62585]: DEBUG nova.network.neutron [None req-adb4d795-1b18-4219-ba74-4f8029d1222a tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 985.678531] env[62585]: DEBUG nova.scheduler.client.report [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 985.774361] env[62585]: DEBUG oslo_concurrency.lockutils [None req-adb4d795-1b18-4219-ba74-4f8029d1222a tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Lock "interface-8d84e240-2dc3-4680-9ee7-b705d4e7749a-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.133s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.956474] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "refresh_cache-0d256aa0-a873-4ff1-8c43-464d8b2d03a8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.956814] env[62585]: DEBUG nova.compute.manager [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Instance network_info: |[{"id": "2eb59df2-5648-46be-995c-88785a05be2a", "address": "fa:16:3e:6a:e4:c5", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2eb59df2-56", "ovs_interfaceid": "2eb59df2-5648-46be-995c-88785a05be2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 985.957289] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:e4:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2eb59df2-5648-46be-995c-88785a05be2a', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 985.965157] env[62585]: DEBUG oslo.service.loopingcall [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 985.965404] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 985.965635] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c3838a4-a853-4e76-adcd-a366e6a6318a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.986656] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 985.986656] env[62585]: value = "task-1385075" [ 985.986656] env[62585]: _type = "Task" [ 985.986656] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.995617] env[62585]: DEBUG nova.compute.manager [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 985.997640] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385075, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.021828] env[62585]: DEBUG nova.virt.hardware [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 986.022109] env[62585]: DEBUG nova.virt.hardware [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 986.022275] env[62585]: DEBUG nova.virt.hardware [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 986.022465] env[62585]: DEBUG nova.virt.hardware [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 986.022616] env[62585]: DEBUG nova.virt.hardware [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c 
tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 986.022772] env[62585]: DEBUG nova.virt.hardware [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 986.023058] env[62585]: DEBUG nova.virt.hardware [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 986.023181] env[62585]: DEBUG nova.virt.hardware [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 986.023360] env[62585]: DEBUG nova.virt.hardware [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 986.023527] env[62585]: DEBUG nova.virt.hardware [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 986.023701] env[62585]: DEBUG nova.virt.hardware [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 986.024661] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52151d65-0ed1-41b6-8d54-aea6c4d2dd87 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.033240] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f928b7d-d7b6-4327-8814-2d186e0a0d0a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.043160] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "92b90694-2bb1-431c-b2c0-ad2f229f4a75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.043398] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "92b90694-2bb1-431c-b2c0-ad2f229f4a75" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.164653] env[62585]: DEBUG nova.compute.manager [req-08cd00eb-c839-4f83-845d-9de4ea37adba req-7e56c2b1-912f-46e3-82e3-938dd056af5c service nova] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Received event network-changed-2eb59df2-5648-46be-995c-88785a05be2a {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 986.164858] env[62585]: DEBUG nova.compute.manager [req-08cd00eb-c839-4f83-845d-9de4ea37adba req-7e56c2b1-912f-46e3-82e3-938dd056af5c service nova] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Refreshing instance network info cache due to event network-changed-2eb59df2-5648-46be-995c-88785a05be2a. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 986.165098] env[62585]: DEBUG oslo_concurrency.lockutils [req-08cd00eb-c839-4f83-845d-9de4ea37adba req-7e56c2b1-912f-46e3-82e3-938dd056af5c service nova] Acquiring lock "refresh_cache-0d256aa0-a873-4ff1-8c43-464d8b2d03a8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.165235] env[62585]: DEBUG oslo_concurrency.lockutils [req-08cd00eb-c839-4f83-845d-9de4ea37adba req-7e56c2b1-912f-46e3-82e3-938dd056af5c service nova] Acquired lock "refresh_cache-0d256aa0-a873-4ff1-8c43-464d8b2d03a8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.165402] env[62585]: DEBUG nova.network.neutron [req-08cd00eb-c839-4f83-845d-9de4ea37adba req-7e56c2b1-912f-46e3-82e3-938dd056af5c service nova] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Refreshing network info cache for port 2eb59df2-5648-46be-995c-88785a05be2a {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 986.183934] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.213s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.184462] env[62585]: DEBUG nova.compute.manager [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 986.186950] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.444s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.187184] env[62585]: DEBUG nova.objects.instance [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lazy-loading 'resources' on Instance uuid b0885bdd-bc8d-4311-8388-54bdc22144c2 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 986.499146] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385075, 'name': CreateVM_Task, 'duration_secs': 0.36526} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.499361] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 986.500038] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.500252] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.500616] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 986.500898] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a949238-379d-4a16-9e4b-b0675389af28 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.506132] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 986.506132] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52d69d9d-5624-cf6e-f517-4f73a7dbf65a" [ 986.506132] env[62585]: _type = "Task" [ 986.506132] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.516016] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52d69d9d-5624-cf6e-f517-4f73a7dbf65a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.535306] env[62585]: DEBUG nova.network.neutron [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Successfully updated port: 6f3ba893-9718-4923-9cfb-b44924398357 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 986.546466] env[62585]: DEBUG nova.compute.manager [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 986.689998] env[62585]: DEBUG nova.compute.utils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 986.691716] env[62585]: DEBUG nova.compute.manager [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 986.691894] env[62585]: DEBUG nova.network.neutron [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 986.773158] env[62585]: DEBUG nova.policy [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1307e393a3fd4cf7b4b1a24571f07c64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c32e1b446add43fe92f7db2dd2373f6c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 986.833781] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66eb40df-6157-4c0e-901d-1d538d213dce {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.841979] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c895364d-785a-4fc2-bf70-c708d2246c5c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.871371] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9211d3-d09f-4738-8d51-e5fb6a67837c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.881241] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-753a29ac-1187-4b2b-9daa-58d4b0e13b44 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.894824] env[62585]: DEBUG nova.compute.provider_tree [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.021077] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52d69d9d-5624-cf6e-f517-4f73a7dbf65a, 'name': SearchDatastore_Task, 'duration_secs': 0.012197} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.021398] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.021636] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 987.021872] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.022031] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.022219] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 987.022478] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-841ca1bf-aee5-4bd2-b13b-db1d952cb18e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.031905] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.032106] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 987.033113] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5042676-76a1-4380-802e-3d8d6dfe75dc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.039011] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "refresh_cache-474d033c-5bf2-4b6a-95be-f865e8f5dfc9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.039207] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "refresh_cache-474d033c-5bf2-4b6a-95be-f865e8f5dfc9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.039360] env[62585]: DEBUG nova.network.neutron [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 987.040608] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 987.040608] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52385583-76ae-306e-b32b-bf955e176744" [ 987.040608] env[62585]: _type = "Task" [ 987.040608] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.049498] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52385583-76ae-306e-b32b-bf955e176744, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.065949] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.077497] env[62585]: DEBUG nova.network.neutron [req-08cd00eb-c839-4f83-845d-9de4ea37adba req-7e56c2b1-912f-46e3-82e3-938dd056af5c service nova] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updated VIF entry in instance network info cache for port 2eb59df2-5648-46be-995c-88785a05be2a. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 987.077846] env[62585]: DEBUG nova.network.neutron [req-08cd00eb-c839-4f83-845d-9de4ea37adba req-7e56c2b1-912f-46e3-82e3-938dd056af5c service nova] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updating instance_info_cache with network_info: [{"id": "2eb59df2-5648-46be-995c-88785a05be2a", "address": "fa:16:3e:6a:e4:c5", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2eb59df2-56", "ovs_interfaceid": "2eb59df2-5648-46be-995c-88785a05be2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.198925] env[62585]: DEBUG nova.compute.manager [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 987.200782] env[62585]: DEBUG nova.network.neutron [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Successfully created port: 7e93d590-92de-4cbe-9262-4085c844ee88 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 987.397779] env[62585]: DEBUG nova.scheduler.client.report [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 987.558213] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52385583-76ae-306e-b32b-bf955e176744, 'name': SearchDatastore_Task, 'duration_secs': 0.01459} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.558213] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-186cfb04-c7e0-4e37-8af5-8c0894193b80 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.567021] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 987.567021] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]525b6025-3d2d-86a2-78fa-6f29613c8413" [ 987.567021] env[62585]: _type = "Task" [ 987.567021] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.573269] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]525b6025-3d2d-86a2-78fa-6f29613c8413, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.579995] env[62585]: DEBUG oslo_concurrency.lockutils [req-08cd00eb-c839-4f83-845d-9de4ea37adba req-7e56c2b1-912f-46e3-82e3-938dd056af5c service nova] Releasing lock "refresh_cache-0d256aa0-a873-4ff1-8c43-464d8b2d03a8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.588135] env[62585]: DEBUG nova.network.neutron [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 987.732955] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Acquiring lock "8d84e240-2dc3-4680-9ee7-b705d4e7749a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.733278] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Lock "8d84e240-2dc3-4680-9ee7-b705d4e7749a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.733461] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Acquiring lock "8d84e240-2dc3-4680-9ee7-b705d4e7749a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.733641] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Lock "8d84e240-2dc3-4680-9ee7-b705d4e7749a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.733807] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Lock "8d84e240-2dc3-4680-9ee7-b705d4e7749a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.739027] env[62585]: INFO nova.compute.manager [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Terminating instance [ 987.740626] env[62585]: DEBUG nova.compute.manager [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 987.740852] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 987.743129] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5b4553-0da0-453a-9977-e2cf9340b1f9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.755060] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 987.755060] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6af01fc-7ebe-491a-b998-5ca23357a5cf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.766518] env[62585]: DEBUG oslo_vmware.api [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Waiting for the task: (returnval){ [ 987.766518] env[62585]: value = "task-1385076" [ 987.766518] env[62585]: _type = "Task" [ 987.766518] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.775856] env[62585]: DEBUG oslo_vmware.api [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': task-1385076, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.806966] env[62585]: DEBUG nova.network.neutron [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Updating instance_info_cache with network_info: [{"id": "6f3ba893-9718-4923-9cfb-b44924398357", "address": "fa:16:3e:ac:9e:56", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f3ba893-97", "ovs_interfaceid": "6f3ba893-9718-4923-9cfb-b44924398357", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.903063] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.716s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.905533] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.191s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.907620] env[62585]: INFO nova.compute.claims [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 987.932022] env[62585]: INFO nova.scheduler.client.report [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Deleted allocations for instance b0885bdd-bc8d-4311-8388-54bdc22144c2 [ 988.076443] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]525b6025-3d2d-86a2-78fa-6f29613c8413, 'name': SearchDatastore_Task, 'duration_secs': 0.034394} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.076443] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.076668] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 0d256aa0-a873-4ff1-8c43-464d8b2d03a8/0d256aa0-a873-4ff1-8c43-464d8b2d03a8.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 988.076845] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3acfb599-55c1-44e0-9327-df6e31a57837 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.083680] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 988.083680] env[62585]: value = "task-1385077" [ 988.083680] env[62585]: _type = "Task" [ 988.083680] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.091913] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385077, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.191863] env[62585]: DEBUG nova.compute.manager [req-f7efeeff-fbcc-4dc5-99df-2e3d0f8e3cb3 req-6b7757fb-c0e0-4e4d-b9d2-2d5f3a766b0b service nova] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Received event network-vif-plugged-6f3ba893-9718-4923-9cfb-b44924398357 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 988.192143] env[62585]: DEBUG oslo_concurrency.lockutils [req-f7efeeff-fbcc-4dc5-99df-2e3d0f8e3cb3 req-6b7757fb-c0e0-4e4d-b9d2-2d5f3a766b0b service nova] Acquiring lock "474d033c-5bf2-4b6a-95be-f865e8f5dfc9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.192308] env[62585]: DEBUG oslo_concurrency.lockutils [req-f7efeeff-fbcc-4dc5-99df-2e3d0f8e3cb3 req-6b7757fb-c0e0-4e4d-b9d2-2d5f3a766b0b service nova] Lock "474d033c-5bf2-4b6a-95be-f865e8f5dfc9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.192480] env[62585]: DEBUG oslo_concurrency.lockutils [req-f7efeeff-fbcc-4dc5-99df-2e3d0f8e3cb3 req-6b7757fb-c0e0-4e4d-b9d2-2d5f3a766b0b service nova] Lock "474d033c-5bf2-4b6a-95be-f865e8f5dfc9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.192649] env[62585]: DEBUG nova.compute.manager [req-f7efeeff-fbcc-4dc5-99df-2e3d0f8e3cb3 req-6b7757fb-c0e0-4e4d-b9d2-2d5f3a766b0b service nova] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] No waiting events found dispatching network-vif-plugged-6f3ba893-9718-4923-9cfb-b44924398357 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 988.192815] env[62585]: WARNING nova.compute.manager [req-f7efeeff-fbcc-4dc5-99df-2e3d0f8e3cb3 req-6b7757fb-c0e0-4e4d-b9d2-2d5f3a766b0b service nova] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Received unexpected event network-vif-plugged-6f3ba893-9718-4923-9cfb-b44924398357 for instance with vm_state building and task_state spawning. [ 988.192979] env[62585]: DEBUG nova.compute.manager [req-f7efeeff-fbcc-4dc5-99df-2e3d0f8e3cb3 req-6b7757fb-c0e0-4e4d-b9d2-2d5f3a766b0b service nova] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Received event network-changed-6f3ba893-9718-4923-9cfb-b44924398357 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 988.193228] env[62585]: DEBUG nova.compute.manager [req-f7efeeff-fbcc-4dc5-99df-2e3d0f8e3cb3 req-6b7757fb-c0e0-4e4d-b9d2-2d5f3a766b0b service nova] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Refreshing instance network info cache due to event network-changed-6f3ba893-9718-4923-9cfb-b44924398357. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 988.193437] env[62585]: DEBUG oslo_concurrency.lockutils [req-f7efeeff-fbcc-4dc5-99df-2e3d0f8e3cb3 req-6b7757fb-c0e0-4e4d-b9d2-2d5f3a766b0b service nova] Acquiring lock "refresh_cache-474d033c-5bf2-4b6a-95be-f865e8f5dfc9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.213221] env[62585]: DEBUG nova.compute.manager [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 988.239433] env[62585]: DEBUG nova.virt.hardware [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 988.239703] env[62585]: DEBUG nova.virt.hardware [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 988.239864] env[62585]: DEBUG nova.virt.hardware [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 988.240059] env[62585]: DEBUG nova.virt.hardware [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 988.240219] env[62585]: DEBUG nova.virt.hardware [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 988.240409] env[62585]: DEBUG nova.virt.hardware [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 988.240637] env[62585]: DEBUG nova.virt.hardware [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e 
tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 988.240805] env[62585]: DEBUG nova.virt.hardware [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 988.240978] env[62585]: DEBUG nova.virt.hardware [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 988.241178] env[62585]: DEBUG nova.virt.hardware [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 988.241365] env[62585]: DEBUG nova.virt.hardware [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 988.243019] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ceb8c93-98cf-4946-85e6-486d97012bbe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.251213] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15ff12c-597f-4cb7-87e3-711314f9b35a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.276371] env[62585]: DEBUG oslo_vmware.api [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': task-1385076, 'name': PowerOffVM_Task, 'duration_secs': 0.459043} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.276690] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 988.276889] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 988.277238] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cebc0080-ad19-4eda-a573-4ec5fcbdbd83 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.309943] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "refresh_cache-474d033c-5bf2-4b6a-95be-f865e8f5dfc9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.310372] env[62585]: DEBUG nova.compute.manager [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Instance network_info: |[{"id": "6f3ba893-9718-4923-9cfb-b44924398357", "address": "fa:16:3e:ac:9e:56", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f3ba893-97", "ovs_interfaceid": "6f3ba893-9718-4923-9cfb-b44924398357", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 988.310833] env[62585]: DEBUG oslo_concurrency.lockutils [req-f7efeeff-fbcc-4dc5-99df-2e3d0f8e3cb3 req-6b7757fb-c0e0-4e4d-b9d2-2d5f3a766b0b service nova] Acquired lock "refresh_cache-474d033c-5bf2-4b6a-95be-f865e8f5dfc9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.311190] env[62585]: DEBUG nova.network.neutron [req-f7efeeff-fbcc-4dc5-99df-2e3d0f8e3cb3 req-6b7757fb-c0e0-4e4d-b9d2-2d5f3a766b0b service nova] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Refreshing network info cache for port 
6f3ba893-9718-4923-9cfb-b44924398357 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 988.312954] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:9e:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40859343-2baa-45fd-88e3-ebf8aaed2b19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6f3ba893-9718-4923-9cfb-b44924398357', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 988.321588] env[62585]: DEBUG oslo.service.loopingcall [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 988.325787] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 988.326655] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86a6898d-d49f-4d7e-a002-37d5ecd7bb4b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.350228] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 988.350228] env[62585]: value = "task-1385079" [ 988.350228] env[62585]: _type = "Task" [ 988.350228] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.361536] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385079, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.363343] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 988.363639] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 988.363948] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Deleting the datastore file [datastore1] 8d84e240-2dc3-4680-9ee7-b705d4e7749a {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 988.364312] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00b38cef-5cb4-4562-a2f0-332e53492870 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.373291] env[62585]: DEBUG oslo_vmware.api [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Waiting for the task: (returnval){ [ 988.373291] env[62585]: value = "task-1385080" [ 988.373291] env[62585]: _type = "Task" [ 988.373291] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.383745] env[62585]: DEBUG oslo_vmware.api [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': task-1385080, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.443991] env[62585]: DEBUG oslo_concurrency.lockutils [None req-8d87cd3c-1759-4c59-a36d-6df4c972feca tempest-ImagesTestJSON-1900763289 tempest-ImagesTestJSON-1900763289-project-member] Lock "b0885bdd-bc8d-4311-8388-54bdc22144c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.664s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.594504] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385077, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44704} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.594754] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 0d256aa0-a873-4ff1-8c43-464d8b2d03a8/0d256aa0-a873-4ff1-8c43-464d8b2d03a8.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 988.594988] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 988.595288] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-663c842c-e92d-43a6-bf24-79b8b6ff7e49 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.602703] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 988.602703] env[62585]: value = "task-1385081" [ 988.602703] env[62585]: _type = "Task" [ 988.602703] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.611876] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385081, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.648733] env[62585]: DEBUG nova.network.neutron [req-f7efeeff-fbcc-4dc5-99df-2e3d0f8e3cb3 req-6b7757fb-c0e0-4e4d-b9d2-2d5f3a766b0b service nova] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Updated VIF entry in instance network info cache for port 6f3ba893-9718-4923-9cfb-b44924398357. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 988.649353] env[62585]: DEBUG nova.network.neutron [req-f7efeeff-fbcc-4dc5-99df-2e3d0f8e3cb3 req-6b7757fb-c0e0-4e4d-b9d2-2d5f3a766b0b service nova] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Updating instance_info_cache with network_info: [{"id": "6f3ba893-9718-4923-9cfb-b44924398357", "address": "fa:16:3e:ac:9e:56", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f3ba893-97", "ovs_interfaceid": "6f3ba893-9718-4923-9cfb-b44924398357", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.863067] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385079, 'name': CreateVM_Task, 'duration_secs': 0.453143} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.863406] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 988.864690] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.864690] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.864690] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 988.864889] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-335b0a70-0d84-4bb8-b45f-951ac2362c38 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.870415] env[62585]: 
DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 988.870415] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5220a7d8-2dd7-387d-4f6c-5d143cf50dff" [ 988.870415] env[62585]: _type = "Task" [ 988.870415] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.882808] env[62585]: DEBUG oslo_vmware.api [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Task: {'id': task-1385080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.28386} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.887383] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 988.887937] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 988.887937] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 988.888086] env[62585]: INFO nova.compute.manager [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Took 1.15 seconds to destroy the instance on the hypervisor. [ 988.888242] env[62585]: DEBUG oslo.service.loopingcall [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 988.888410] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5220a7d8-2dd7-387d-4f6c-5d143cf50dff, 'name': SearchDatastore_Task, 'duration_secs': 0.010485} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.888638] env[62585]: DEBUG nova.compute.manager [-] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 988.888737] env[62585]: DEBUG nova.network.neutron [-] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 988.890875] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.891122] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 988.891490] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.891490] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.891658] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 988.891949] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09c41ce5-dd9e-4101-8b22-304fa0c56b54 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.901388] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 988.901568] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 988.902298] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f98b66b-650c-4b0b-b313-393e179964c9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.907648] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 988.907648] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e9d43e-b036-6d6e-e67b-28fcc322844e" [ 988.907648] env[62585]: _type = "Task" [ 988.907648] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.918645] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e9d43e-b036-6d6e-e67b-28fcc322844e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.009541] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33708a58-4ff2-4b19-a8f2-b95e37ddd9ac {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.017272] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ba16f5-85d1-47a8-a207-109025fc3527 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.050825] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f862b8-0c93-4b2a-bff2-3f659b0bc461 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.059513] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b18872-f945-4347-a2bc-9bba92e4fa37 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.075776] env[62585]: DEBUG nova.compute.provider_tree [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.111967] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385081, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08422} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.112442] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 989.113282] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b8b26a-060f-4363-80be-7fe1cfa7009d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.136283] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 0d256aa0-a873-4ff1-8c43-464d8b2d03a8/0d256aa0-a873-4ff1-8c43-464d8b2d03a8.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.136555] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23eee857-8261-494f-9ba8-998d90763bc8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.152151] env[62585]: DEBUG oslo_concurrency.lockutils [req-f7efeeff-fbcc-4dc5-99df-2e3d0f8e3cb3 req-6b7757fb-c0e0-4e4d-b9d2-2d5f3a766b0b service nova] Releasing lock "refresh_cache-474d033c-5bf2-4b6a-95be-f865e8f5dfc9" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.155583] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 989.155583] env[62585]: value = "task-1385082" [ 989.155583] env[62585]: _type = "Task" [ 989.155583] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.163234] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385082, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.419889] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e9d43e-b036-6d6e-e67b-28fcc322844e, 'name': SearchDatastore_Task, 'duration_secs': 0.00955} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.420874] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-444435c7-53f0-4f1f-aa24-bde51f2db6df {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.427466] env[62585]: DEBUG nova.network.neutron [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Successfully updated port: 7e93d590-92de-4cbe-9262-4085c844ee88 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 989.432195] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 989.432195] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]525fe2b7-7e4f-89ef-84f6-05a2bb56580b" [ 989.432195] env[62585]: _type = "Task" [ 989.432195] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.442413] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]525fe2b7-7e4f-89ef-84f6-05a2bb56580b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.579400] env[62585]: DEBUG nova.scheduler.client.report [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 989.665061] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385082, 'name': ReconfigVM_Task, 'duration_secs': 0.275068} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.665343] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 0d256aa0-a873-4ff1-8c43-464d8b2d03a8/0d256aa0-a873-4ff1-8c43-464d8b2d03a8.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 989.665938] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77d45c65-4c8b-4446-9707-8a875f1418a6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.672568] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 989.672568] env[62585]: value = "task-1385083" [ 989.672568] env[62585]: _type = "Task" [ 989.672568] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.681090] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385083, 'name': Rename_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.930791] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.935112] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquired lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.935112] env[62585]: DEBUG nova.network.neutron [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 989.944896] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]525fe2b7-7e4f-89ef-84f6-05a2bb56580b, 'name': SearchDatastore_Task, 'duration_secs': 0.009392} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.945784] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.946062] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 474d033c-5bf2-4b6a-95be-f865e8f5dfc9/474d033c-5bf2-4b6a-95be-f865e8f5dfc9.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 989.946327] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea4702a3-2c7b-4a5f-84bc-53be8eeb643a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.953509] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 989.953509] env[62585]: value = "task-1385084" [ 989.953509] env[62585]: _type = "Task" [ 989.953509] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.963373] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385084, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.988573] env[62585]: DEBUG nova.network.neutron [-] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.083892] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.178s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.084668] env[62585]: DEBUG nova.compute.manager [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 990.091127] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.022s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.091127] env[62585]: INFO nova.compute.claims [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 990.183078] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385083, 'name': Rename_Task, 'duration_secs': 0.141601} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.183461] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 990.183725] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51445c65-d338-48b2-8ea1-d67e53915f17 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.190915] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 990.190915] env[62585]: value = "task-1385085" [ 990.190915] env[62585]: _type = "Task" [ 990.190915] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.199765] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385085, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.227939] env[62585]: DEBUG nova.compute.manager [req-e54828d8-5dd3-42db-85c7-740c290baaed req-f8b22ce1-f9a1-4712-b1ed-fb65942a8789 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Received event network-vif-plugged-7e93d590-92de-4cbe-9262-4085c844ee88 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 990.227939] env[62585]: DEBUG oslo_concurrency.lockutils [req-e54828d8-5dd3-42db-85c7-740c290baaed req-f8b22ce1-f9a1-4712-b1ed-fb65942a8789 service nova] Acquiring lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.227939] env[62585]: DEBUG oslo_concurrency.lockutils [req-e54828d8-5dd3-42db-85c7-740c290baaed req-f8b22ce1-f9a1-4712-b1ed-fb65942a8789 service nova] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.230419] env[62585]: DEBUG oslo_concurrency.lockutils [req-e54828d8-5dd3-42db-85c7-740c290baaed req-f8b22ce1-f9a1-4712-b1ed-fb65942a8789 service nova] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.230419] env[62585]: DEBUG nova.compute.manager [req-e54828d8-5dd3-42db-85c7-740c290baaed req-f8b22ce1-f9a1-4712-b1ed-fb65942a8789 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] No waiting events found dispatching network-vif-plugged-7e93d590-92de-4cbe-9262-4085c844ee88 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 990.230419] env[62585]: WARNING nova.compute.manager [req-e54828d8-5dd3-42db-85c7-740c290baaed req-f8b22ce1-f9a1-4712-b1ed-fb65942a8789 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Received unexpected event network-vif-plugged-7e93d590-92de-4cbe-9262-4085c844ee88 for instance with vm_state building and task_state spawning. [ 990.230419] env[62585]: DEBUG nova.compute.manager [req-e54828d8-5dd3-42db-85c7-740c290baaed req-f8b22ce1-f9a1-4712-b1ed-fb65942a8789 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Received event network-changed-7e93d590-92de-4cbe-9262-4085c844ee88 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 990.230591] env[62585]: DEBUG nova.compute.manager [req-e54828d8-5dd3-42db-85c7-740c290baaed req-f8b22ce1-f9a1-4712-b1ed-fb65942a8789 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Refreshing instance network info cache due to event network-changed-7e93d590-92de-4cbe-9262-4085c844ee88. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 990.230628] env[62585]: DEBUG oslo_concurrency.lockutils [req-e54828d8-5dd3-42db-85c7-740c290baaed req-f8b22ce1-f9a1-4712-b1ed-fb65942a8789 service nova] Acquiring lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.465322] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385084, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49424} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.465791] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 474d033c-5bf2-4b6a-95be-f865e8f5dfc9/474d033c-5bf2-4b6a-95be-f865e8f5dfc9.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 990.466075] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 990.466388] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee10ddff-21c2-4911-bb72-3df64954e95c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.472801] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 990.472801] env[62585]: value = "task-1385086" [ 990.472801] env[62585]: _type = "Task" [ 990.472801] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.481766] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385086, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.491453] env[62585]: INFO nova.compute.manager [-] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Took 1.60 seconds to deallocate network for instance. [ 990.498351] env[62585]: DEBUG nova.network.neutron [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 990.589839] env[62585]: DEBUG nova.compute.utils [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 990.591471] env[62585]: DEBUG nova.compute.manager [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Not allocating networking since 'none' was specified. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 990.641951] env[62585]: DEBUG nova.network.neutron [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Updating instance_info_cache with network_info: [{"id": "7e93d590-92de-4cbe-9262-4085c844ee88", "address": "fa:16:3e:2a:2c:ee", "network": {"id": "66030331-b20b-4f58-ac7c-9dbd68ceaf6a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-363131979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c32e1b446add43fe92f7db2dd2373f6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e93d590-92", "ovs_interfaceid": "7e93d590-92de-4cbe-9262-4085c844ee88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.701731] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385085, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.983363] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385086, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064924} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.983726] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 990.984489] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed4d7ca5-e1ab-4e40-a387-c14ec561981f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.998527] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.007363] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 474d033c-5bf2-4b6a-95be-f865e8f5dfc9/474d033c-5bf2-4b6a-95be-f865e8f5dfc9.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 991.007638] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa661d16-e4fe-40ad-a1ad-72f390e0034e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.026578] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 991.026578] env[62585]: value = "task-1385087" [ 991.026578] env[62585]: _type = "Task" [ 991.026578] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.033873] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385087, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.093029] env[62585]: DEBUG nova.compute.manager [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 991.144213] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Releasing lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.144514] env[62585]: DEBUG nova.compute.manager [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Instance network_info: |[{"id": "7e93d590-92de-4cbe-9262-4085c844ee88", "address": "fa:16:3e:2a:2c:ee", "network": {"id": "66030331-b20b-4f58-ac7c-9dbd68ceaf6a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-363131979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c32e1b446add43fe92f7db2dd2373f6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e93d590-92", "ovs_interfaceid": "7e93d590-92de-4cbe-9262-4085c844ee88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 991.145046] env[62585]: DEBUG oslo_concurrency.lockutils [req-e54828d8-5dd3-42db-85c7-740c290baaed req-f8b22ce1-f9a1-4712-b1ed-fb65942a8789 service nova] Acquired lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.145336] env[62585]: DEBUG nova.network.neutron [req-e54828d8-5dd3-42db-85c7-740c290baaed req-f8b22ce1-f9a1-4712-b1ed-fb65942a8789 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Refreshing network info cache for port 7e93d590-92de-4cbe-9262-4085c844ee88 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 991.146707] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:2c:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5fb99c57-eaa0-447b-bb33-baced85d9c00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e93d590-92de-4cbe-9262-4085c844ee88', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 991.154320] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Creating folder: 
Project (c32e1b446add43fe92f7db2dd2373f6c). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 991.157701] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b5dd8f6-7b94-4ead-ae9f-fa096115e329 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.170713] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Created folder: Project (c32e1b446add43fe92f7db2dd2373f6c) in parent group-v293962. [ 991.170896] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Creating folder: Instances. Parent ref: group-v294069. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 991.171161] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fff66b49-ff47-445c-a014-444f87f5b4b8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.179768] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Created folder: Instances in parent group-v294069. [ 991.180012] env[62585]: DEBUG oslo.service.loopingcall [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 991.182233] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 991.182592] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce38219d-8c13-4153-8132-d3ffbf4c2de0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.207238] env[62585]: DEBUG oslo_vmware.api [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385085, 'name': PowerOnVM_Task, 'duration_secs': 0.647067} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.208443] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 991.208656] env[62585]: INFO nova.compute.manager [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Took 7.41 seconds to spawn the instance on the hypervisor. 
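
Annotation: the entries above show the pattern every vCenter operation in this log follows — Nova invokes a *_Task method (PowerOnVM_Task, ReconfigVM_Task, CreateVM_Task, ...), gets back a task reference, and wait_for_task polls it until it succeeds or fails (the recurring "_poll_task ... progress is N%" / "completed successfully" lines). Below is a minimal sketch of that invoke-then-poll pattern with oslo.vmware; it is illustrative only, `session` is assumed to be an already-created oslo_vmware.api.VMwareAPISession, and power_on_vm is a hypothetical helper, not Nova's code.

def power_on_vm(session, vm_ref):
    """Invoke PowerOnVM_Task on a VM and block until the task completes."""
    # invoke_api() issues the SOAP call through the session's vim client and
    # returns a Task managed-object reference (sketch; argument order assumed).
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    # wait_for_task() polls the task periodically (the "progress is N%" and
    # "completed successfully" lines above) and raises if the task errors.
    return session.wait_for_task(task)
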
[ 991.208839] env[62585]: DEBUG nova.compute.manager [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 991.209092] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 991.209092] env[62585]: value = "task-1385090" [ 991.209092] env[62585]: _type = "Task" [ 991.209092] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.210460] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca4fbbe-1a82-4722-a005-5b9b17269b11 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.213211] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b8c8d0-3689-4924-8d34-42b5fad161cb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.230334] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385090, 'name': CreateVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.231177] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba3ec43-5c9c-4205-99d3-93ad7e51eeae {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.260675] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f857c279-c707-4f34-89af-ea1d0b1e69c7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.268919] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feec9cb5-6d37-4c0a-9650-5e7bf5a38b63 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.282597] env[62585]: DEBUG nova.compute.provider_tree [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.536923] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385087, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.722752] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385090, 'name': CreateVM_Task} progress is 99%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.734560] env[62585]: INFO nova.compute.manager [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Took 16.96 seconds to build instance. 
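
Annotation: the "Inventory has not changed for provider ..." entries in this section (at 989.579 above and again just below at 991.786) carry the resource inventory the resource tracker reports to Placement. As a rough illustration of what those numbers mean for scheduling, the snippet below computes usable capacity per resource class, assuming the standard Placement formula capacity = (total - reserved) * allocation_ratio; it is a back-of-the-envelope sketch, not Nova or Placement code.

# Values copied from the inventory payload logged in this section.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g} schedulable units")
# Expected output: VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
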
[ 991.786083] env[62585]: DEBUG nova.scheduler.client.report [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 991.872509] env[62585]: DEBUG nova.network.neutron [req-e54828d8-5dd3-42db-85c7-740c290baaed req-f8b22ce1-f9a1-4712-b1ed-fb65942a8789 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Updated VIF entry in instance network info cache for port 7e93d590-92de-4cbe-9262-4085c844ee88. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 991.872880] env[62585]: DEBUG nova.network.neutron [req-e54828d8-5dd3-42db-85c7-740c290baaed req-f8b22ce1-f9a1-4712-b1ed-fb65942a8789 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Updating instance_info_cache with network_info: [{"id": "7e93d590-92de-4cbe-9262-4085c844ee88", "address": "fa:16:3e:2a:2c:ee", "network": {"id": "66030331-b20b-4f58-ac7c-9dbd68ceaf6a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-363131979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c32e1b446add43fe92f7db2dd2373f6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e93d590-92", "ovs_interfaceid": "7e93d590-92de-4cbe-9262-4085c844ee88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.036432] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385087, 'name': ReconfigVM_Task, 'duration_secs': 0.841407} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.036800] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 474d033c-5bf2-4b6a-95be-f865e8f5dfc9/474d033c-5bf2-4b6a-95be-f865e8f5dfc9.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 992.037407] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0a22e2d-eded-4905-8390-d7f17fa31dea {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.043895] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 992.043895] env[62585]: value = "task-1385091" [ 992.043895] env[62585]: _type = "Task" [ 992.043895] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.051279] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385091, 'name': Rename_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.102952] env[62585]: DEBUG nova.compute.manager [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 992.127490] env[62585]: DEBUG nova.virt.hardware [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 992.127746] env[62585]: DEBUG nova.virt.hardware [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 992.127910] env[62585]: DEBUG nova.virt.hardware [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 992.128111] env[62585]: DEBUG nova.virt.hardware [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 992.128264] env[62585]: DEBUG nova.virt.hardware [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 992.128416] env[62585]: DEBUG nova.virt.hardware [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 992.128719] env[62585]: DEBUG nova.virt.hardware [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 992.128902] env[62585]: DEBUG nova.virt.hardware [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 992.129120] env[62585]: DEBUG nova.virt.hardware [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 
tempest-ServerShowV254Test-802977328-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 992.129291] env[62585]: DEBUG nova.virt.hardware [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 992.129482] env[62585]: DEBUG nova.virt.hardware [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 992.130355] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c24c29-06dd-4595-9472-729d66fc43b0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.138295] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52cc526d-aea6-489b-8a29-b3a260ed592a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.152179] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Instance VIF info [] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 992.157537] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Creating folder: Project (4c27e283ec3e44a8875ab69c8858543b). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 992.157810] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03e78c33-1321-4890-8913-1ac52cab0fc7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.167884] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Created folder: Project (4c27e283ec3e44a8875ab69c8858543b) in parent group-v293962. [ 992.168082] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Creating folder: Instances. Parent ref: group-v294072. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 992.168304] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dbf571f1-006e-4f3f-a09b-0410fe049d82 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.176884] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Created folder: Instances in parent group-v294072. 
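
Annotation: throughout this section the image-cache path "[datastore1] devstack-image-cache_base/790c072e-..." is guarded by a named lock (the Acquiring/Acquired/Releasing lock entries at 988.890, 989.945 and 992.222). The fragment below is a minimal, hypothetical sketch of that pattern with oslo.concurrency's lockutils; the lock name is copied from the log, but the function and its body are illustrative, not Nova's _fetch_image_if_missing implementation.

from oslo_concurrency import lockutils

IMAGE_CACHE_LOCK = ("[datastore1] devstack-image-cache_base/"
                    "790c072e-fdf9-43ec-b7a5-3b21a2eaee40")

def fetch_image_if_missing():
    # lockutils.lock() is a context manager: entering it produces the
    # "Acquiring"/"Acquired" lines seen in this log, leaving it produces the
    # "Releasing" line, so only one request works on the cached VMDK at a time.
    with lockutils.lock(IMAGE_CACHE_LOCK):
        # ... check the datastore cache and copy the base VMDK if absent ...
        pass
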
[ 992.177125] env[62585]: DEBUG oslo.service.loopingcall [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.177315] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 992.177506] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-60eecd4f-da28-4e76-91ee-4f24b03514f8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.193345] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 992.193345] env[62585]: value = "task-1385094" [ 992.193345] env[62585]: _type = "Task" [ 992.193345] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.200535] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385094, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.222138] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385090, 'name': CreateVM_Task, 'duration_secs': 0.690715} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.222287] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 992.222954] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.223130] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.223473] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 992.223717] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f0386f9-ce1b-48cb-b2b5-8e622311ad93 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.228570] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: 
(returnval){ [ 992.228570] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a1138e-f868-853c-9bb2-9c2eb0a4c234" [ 992.228570] env[62585]: _type = "Task" [ 992.228570] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.235815] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a1138e-f868-853c-9bb2-9c2eb0a4c234, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.236224] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bd4d9cbd-a515-41e3-8b3c-0a3ac95a254e tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.464s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.291518] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.203s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.292254] env[62585]: DEBUG nova.compute.manager [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 992.295053] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.297s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.295312] env[62585]: DEBUG nova.objects.instance [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Lazy-loading 'resources' on Instance uuid 8d84e240-2dc3-4680-9ee7-b705d4e7749a {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.375604] env[62585]: DEBUG oslo_concurrency.lockutils [req-e54828d8-5dd3-42db-85c7-740c290baaed req-f8b22ce1-f9a1-4712-b1ed-fb65942a8789 service nova] Releasing lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.375917] env[62585]: DEBUG nova.compute.manager [req-e54828d8-5dd3-42db-85c7-740c290baaed req-f8b22ce1-f9a1-4712-b1ed-fb65942a8789 service nova] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Received event network-vif-deleted-8d5c80a7-e25e-414e-a45f-a43d747618bc {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 992.554451] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385091, 'name': Rename_Task, 'duration_secs': 0.296929} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.554744] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 992.554993] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ea6cede-a139-4f1d-81c4-a1f69259242f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.561379] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 992.561379] env[62585]: value = "task-1385095" [ 992.561379] env[62585]: _type = "Task" [ 992.561379] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.568786] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385095, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.702908] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385094, 'name': CreateVM_Task, 'duration_secs': 0.278976} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.703090] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 992.703529] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.703693] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.704027] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 992.704284] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6b16b02-943e-4343-8644-6ecaef93e17b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.708733] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 992.708733] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52272e4c-7b57-ef1d-7304-e750fbb8657d" [ 992.708733] env[62585]: _type = "Task" [ 992.708733] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.716320] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52272e4c-7b57-ef1d-7304-e750fbb8657d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.739054] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a1138e-f868-853c-9bb2-9c2eb0a4c234, 'name': SearchDatastore_Task, 'duration_secs': 0.013226} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.739462] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.739689] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 992.739926] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.740145] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.740373] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 992.740644] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-968c672d-119d-4056-baea-d323daa62d32 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.751762] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 992.751949] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 992.752682] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-901c3434-fad6-4698-85e4-e28e7e572960 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.757791] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 992.757791] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f5e466-e8de-d143-dd33-d748ba0123d3" [ 992.757791] env[62585]: _type = "Task" [ 992.757791] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.765545] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f5e466-e8de-d143-dd33-d748ba0123d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.798917] env[62585]: DEBUG nova.compute.utils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 992.802548] env[62585]: DEBUG nova.compute.manager [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 992.802710] env[62585]: DEBUG nova.network.neutron [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 992.841383] env[62585]: DEBUG nova.policy [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73ef782ef63e424195872ee2cf9928b0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca61db1ff3fb4f5cae3dc18e70af8ba7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 992.893405] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df86986c-946d-42b5-92b8-00bfba784cd2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.901502] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ecd48d-e86b-47e4-9ca4-a2c272308864 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.932288] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f42af19-9367-4fb8-805f-6db8f03d589a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.940662] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42615a2f-f821-4155-8ce2-03ba9b3b35f3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.954438] env[62585]: DEBUG nova.compute.provider_tree [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.071778] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385095, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.085940] env[62585]: DEBUG nova.network.neutron [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Successfully created port: 02d93fe0-638c-43ca-8ed9-c67acc2340c0 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 993.221532] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52272e4c-7b57-ef1d-7304-e750fbb8657d, 'name': SearchDatastore_Task, 'duration_secs': 0.015374} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.222198] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.222287] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 993.222505] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.223746] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.223746] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 993.223746] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ecee633f-3af0-4bde-8dfa-7fa520ec9cd3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.231668] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 993.231843] 
env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 993.232556] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7190c8af-c780-45de-80de-e20cf6176ba4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.237988] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 993.237988] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527708e1-4d8d-6993-a12f-eb324ed84103" [ 993.237988] env[62585]: _type = "Task" [ 993.237988] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.246385] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527708e1-4d8d-6993-a12f-eb324ed84103, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.267566] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f5e466-e8de-d143-dd33-d748ba0123d3, 'name': SearchDatastore_Task, 'duration_secs': 0.01223} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.268457] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-979bd029-8aaa-487f-bae5-1893485b4efc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.273494] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 993.273494] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c3896a-7e0b-d4d9-3bb5-1ecee33ea818" [ 993.273494] env[62585]: _type = "Task" [ 993.273494] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.283618] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c3896a-7e0b-d4d9-3bb5-1ecee33ea818, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.305520] env[62585]: DEBUG nova.compute.manager [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 993.457309] env[62585]: DEBUG nova.scheduler.client.report [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 993.571878] env[62585]: DEBUG oslo_vmware.api [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385095, 'name': PowerOnVM_Task, 'duration_secs': 0.828013} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.572172] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 993.572515] env[62585]: INFO nova.compute.manager [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Took 7.58 seconds to spawn the instance on the hypervisor. [ 993.572748] env[62585]: DEBUG nova.compute.manager [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 993.573637] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fabfc02-44d5-4852-890c-9e9192890fee {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.752323] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527708e1-4d8d-6993-a12f-eb324ed84103, 'name': SearchDatastore_Task, 'duration_secs': 0.010505} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.753499] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57eadab2-0ac1-43cc-8392-01627fc56ad6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.760188] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 993.760188] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522e4c3b-517b-b622-704e-87c581de2401" [ 993.760188] env[62585]: _type = "Task" [ 993.760188] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.770307] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522e4c3b-517b-b622-704e-87c581de2401, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.782400] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c3896a-7e0b-d4d9-3bb5-1ecee33ea818, 'name': SearchDatastore_Task, 'duration_secs': 0.025234} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.782659] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.782916] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] bb076a4e-eb38-4d0c-bdea-f8ebb46d7968/bb076a4e-eb38-4d0c-bdea-f8ebb46d7968.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 993.783175] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a46eb271-589a-4369-9271-7010076cc92a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.789832] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 993.789832] env[62585]: value = "task-1385096" [ 993.789832] env[62585]: _type = "Task" [ 993.789832] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.798574] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385096, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.801244] env[62585]: DEBUG nova.compute.manager [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Stashing vm_state: active {{(pid=62585) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 993.963055] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.667s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.986789] env[62585]: INFO nova.scheduler.client.report [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Deleted allocations for instance 8d84e240-2dc3-4680-9ee7-b705d4e7749a [ 994.096084] env[62585]: INFO nova.compute.manager [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Took 13.37 seconds to build instance. [ 994.273030] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522e4c3b-517b-b622-704e-87c581de2401, 'name': SearchDatastore_Task, 'duration_secs': 0.00984} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.273271] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.273558] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08/6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 994.273823] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe43f93f-f01e-4a6d-a87c-2ef85dd4afe1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.281171] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 994.281171] env[62585]: value = "task-1385097" [ 994.281171] env[62585]: _type = "Task" [ 994.281171] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.288953] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385097, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.298672] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385096, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.314331] env[62585]: DEBUG nova.compute.manager [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 994.318352] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.318596] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.345994] env[62585]: DEBUG nova.virt.hardware [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 994.346368] env[62585]: DEBUG nova.virt.hardware [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 994.346587] env[62585]: DEBUG nova.virt.hardware [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 994.346839] env[62585]: DEBUG nova.virt.hardware [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 994.347033] env[62585]: DEBUG nova.virt.hardware [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 994.347196] env[62585]: DEBUG nova.virt.hardware [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 994.347459] env[62585]: DEBUG nova.virt.hardware [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 994.347632] env[62585]: DEBUG nova.virt.hardware [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 994.347838] env[62585]: DEBUG nova.virt.hardware [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 994.348042] env[62585]: DEBUG nova.virt.hardware [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 994.348221] env[62585]: DEBUG nova.virt.hardware [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 994.349147] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f40c03b-2c9c-4162-9a02-d89e2e855bc9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.357829] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35391fc6-94f0-4e76-9601-8c28b2a975c2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.494649] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfe072c-4775-4424-bd46-dcb94c274057 tempest-AttachInterfacesV270Test-60456381 tempest-AttachInterfacesV270Test-60456381-project-member] Lock "8d84e240-2dc3-4680-9ee7-b705d4e7749a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.761s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.524687] env[62585]: DEBUG nova.compute.manager [req-81ed2946-4105-4770-aab1-ed7c578c6137 req-a0dffce7-01ac-4578-b5e1-f57254ed133c service nova] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Received event network-vif-plugged-02d93fe0-638c-43ca-8ed9-c67acc2340c0 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 994.524908] env[62585]: DEBUG oslo_concurrency.lockutils [req-81ed2946-4105-4770-aab1-ed7c578c6137 req-a0dffce7-01ac-4578-b5e1-f57254ed133c service nova] Acquiring lock "92b90694-2bb1-431c-b2c0-ad2f229f4a75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.525145] 
env[62585]: DEBUG oslo_concurrency.lockutils [req-81ed2946-4105-4770-aab1-ed7c578c6137 req-a0dffce7-01ac-4578-b5e1-f57254ed133c service nova] Lock "92b90694-2bb1-431c-b2c0-ad2f229f4a75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.525371] env[62585]: DEBUG oslo_concurrency.lockutils [req-81ed2946-4105-4770-aab1-ed7c578c6137 req-a0dffce7-01ac-4578-b5e1-f57254ed133c service nova] Lock "92b90694-2bb1-431c-b2c0-ad2f229f4a75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.525600] env[62585]: DEBUG nova.compute.manager [req-81ed2946-4105-4770-aab1-ed7c578c6137 req-a0dffce7-01ac-4578-b5e1-f57254ed133c service nova] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] No waiting events found dispatching network-vif-plugged-02d93fe0-638c-43ca-8ed9-c67acc2340c0 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 994.525718] env[62585]: WARNING nova.compute.manager [req-81ed2946-4105-4770-aab1-ed7c578c6137 req-a0dffce7-01ac-4578-b5e1-f57254ed133c service nova] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Received unexpected event network-vif-plugged-02d93fe0-638c-43ca-8ed9-c67acc2340c0 for instance with vm_state building and task_state spawning. [ 994.599162] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dd09a72b-1acb-4363-a22b-85a8e4e7bf9c tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "474d033c-5bf2-4b6a-95be-f865e8f5dfc9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.897s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.606924] env[62585]: DEBUG nova.network.neutron [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Successfully updated port: 02d93fe0-638c-43ca-8ed9-c67acc2340c0 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 994.662705] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "474d033c-5bf2-4b6a-95be-f865e8f5dfc9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.662981] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "474d033c-5bf2-4b6a-95be-f865e8f5dfc9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.663214] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "474d033c-5bf2-4b6a-95be-f865e8f5dfc9-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.663426] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "474d033c-5bf2-4b6a-95be-f865e8f5dfc9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.663602] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "474d033c-5bf2-4b6a-95be-f865e8f5dfc9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.665737] env[62585]: INFO nova.compute.manager [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Terminating instance [ 994.667582] env[62585]: DEBUG nova.compute.manager [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 994.667778] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 994.668612] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9522c73-b55e-4386-8108-51b7fe7358ef {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.676260] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 994.676491] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c71d3cc-b3fd-4103-9934-20de54b52420 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.682312] env[62585]: DEBUG oslo_vmware.api [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 994.682312] env[62585]: value = "task-1385098" [ 994.682312] env[62585]: _type = "Task" [ 994.682312] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.690629] env[62585]: DEBUG oslo_vmware.api [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385098, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.792714] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385097, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.800316] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385096, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518983} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.800564] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] bb076a4e-eb38-4d0c-bdea-f8ebb46d7968/bb076a4e-eb38-4d0c-bdea-f8ebb46d7968.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 994.800779] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 994.801031] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2fac42aa-ce3f-4b90-9ce7-2c5b3a08e58f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.807210] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 994.807210] env[62585]: value = "task-1385099" [ 994.807210] env[62585]: _type = "Task" [ 994.807210] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.814388] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385099, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.824028] env[62585]: INFO nova.compute.claims [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 995.114308] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "refresh_cache-92b90694-2bb1-431c-b2c0-ad2f229f4a75" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.114308] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquired lock "refresh_cache-92b90694-2bb1-431c-b2c0-ad2f229f4a75" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.114308] env[62585]: DEBUG nova.network.neutron [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 995.195130] env[62585]: DEBUG oslo_vmware.api [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385098, 'name': PowerOffVM_Task, 'duration_secs': 0.339726} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.195454] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 995.195657] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 995.195929] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24ecd927-f5f3-4d05-b584-affb3ce8f912 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.283049] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 995.283348] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 995.283626] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleting the datastore file [datastore1] 474d033c-5bf2-4b6a-95be-f865e8f5dfc9 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 995.287909] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b4e980d-2e31-410c-8f3a-af835922d19f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.294908] env[62585]: DEBUG oslo_vmware.api [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 995.294908] env[62585]: value = "task-1385101" [ 995.294908] env[62585]: _type = "Task" [ 995.294908] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.297941] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385097, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.306015] env[62585]: DEBUG oslo_vmware.api [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385101, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.315717] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385099, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.192958} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.316332] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 995.316725] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67be6fbf-c3a8-446b-accd-593c63b8571f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.331457] env[62585]: INFO nova.compute.resource_tracker [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updating resource usage from migration 39ea0e61-87e1-4e55-843c-715aed911150 [ 995.342183] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] bb076a4e-eb38-4d0c-bdea-f8ebb46d7968/bb076a4e-eb38-4d0c-bdea-f8ebb46d7968.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 995.342776] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0345e16-73c2-4fc2-aa66-348e889afbfc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.362322] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 995.362322] env[62585]: value = "task-1385102" [ 995.362322] env[62585]: _type = "Task" [ 995.362322] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.372403] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385102, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.445455] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afcc6ebb-dd2d-4afc-98eb-31daad4564a9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.453604] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1007cdc2-889a-4d6f-8ce7-f039fc5da883 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.489738] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05971c3-ea64-435c-a152-1567416a2ba6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.498720] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97cac2bd-a717-4c74-b100-8622eb75c378 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.513237] env[62585]: DEBUG nova.compute.provider_tree [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.791985] env[62585]: DEBUG nova.network.neutron [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 995.796762] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385097, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.040272} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.798493] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08/6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 995.798813] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 995.799227] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4c9286a-5fde-49a1-b41e-c148d4affaa0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.809850] env[62585]: DEBUG oslo_vmware.api [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385101, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248111} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.811107] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 995.811400] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 995.811675] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 995.811944] env[62585]: INFO nova.compute.manager [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Took 1.14 seconds to destroy the instance on the hypervisor. [ 995.812383] env[62585]: DEBUG oslo.service.loopingcall [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 995.812708] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 995.812708] env[62585]: value = "task-1385103" [ 995.812708] env[62585]: _type = "Task" [ 995.812708] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.813140] env[62585]: DEBUG nova.compute.manager [-] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 995.813140] env[62585]: DEBUG nova.network.neutron [-] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 995.826661] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385103, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.877765] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385102, 'name': ReconfigVM_Task, 'duration_secs': 0.335756} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.878055] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Reconfigured VM instance instance-00000064 to attach disk [datastore1] bb076a4e-eb38-4d0c-bdea-f8ebb46d7968/bb076a4e-eb38-4d0c-bdea-f8ebb46d7968.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 995.878674] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eec103ea-bbc3-4443-a957-dcc40df94ecb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.884460] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 995.884460] env[62585]: value = "task-1385104" [ 995.884460] env[62585]: _type = "Task" [ 995.884460] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.892449] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385104, 'name': Rename_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.016662] env[62585]: DEBUG nova.scheduler.client.report [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 996.064747] env[62585]: DEBUG nova.network.neutron [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Updating instance_info_cache with network_info: [{"id": "02d93fe0-638c-43ca-8ed9-c67acc2340c0", "address": "fa:16:3e:52:fc:53", "network": {"id": "bd25be93-26eb-4d34-b141-5264c9d0539a", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1411210261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ca61db1ff3fb4f5cae3dc18e70af8ba7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86b8f7fc-c105-4bcb-a4ec-c363ed38b17a", "external-id": "nsx-vlan-transportzone-830", "segmentation_id": 830, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02d93fe0-63", "ovs_interfaceid": "02d93fe0-638c-43ca-8ed9-c67acc2340c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.326639] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385103, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067229} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.327353] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 996.328171] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129d2273-9a67-4f20-b077-6837ffefdbfc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.347495] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08/6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 996.347808] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-753a6667-5e68-4032-bbe9-bc2fb4c76bdc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.368846] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 996.368846] env[62585]: value = "task-1385105" [ 996.368846] env[62585]: _type = "Task" [ 996.368846] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.377049] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385105, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.107718] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.789s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.107934] env[62585]: INFO nova.compute.manager [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Migrating [ 997.114046] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Releasing lock "refresh_cache-92b90694-2bb1-431c-b2c0-ad2f229f4a75" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.114308] env[62585]: DEBUG nova.compute.manager [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Instance network_info: |[{"id": "02d93fe0-638c-43ca-8ed9-c67acc2340c0", "address": "fa:16:3e:52:fc:53", "network": {"id": "bd25be93-26eb-4d34-b141-5264c9d0539a", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1411210261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ca61db1ff3fb4f5cae3dc18e70af8ba7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86b8f7fc-c105-4bcb-a4ec-c363ed38b17a", "external-id": "nsx-vlan-transportzone-830", "segmentation_id": 830, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02d93fe0-63", "ovs_interfaceid": "02d93fe0-638c-43ca-8ed9-c67acc2340c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 997.114521] env[62585]: DEBUG nova.network.neutron [-] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.119016] env[62585]: DEBUG nova.compute.manager [req-b72fec87-7d0e-47cb-b792-355ca678cfc0 req-109de61b-9173-4b97-a4ee-d439d56bb17c service nova] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Received event network-changed-02d93fe0-638c-43ca-8ed9-c67acc2340c0 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 997.119016] env[62585]: DEBUG nova.compute.manager [req-b72fec87-7d0e-47cb-b792-355ca678cfc0 req-109de61b-9173-4b97-a4ee-d439d56bb17c service nova] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Refreshing instance network info cache due to event 
network-changed-02d93fe0-638c-43ca-8ed9-c67acc2340c0. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 997.119016] env[62585]: DEBUG oslo_concurrency.lockutils [req-b72fec87-7d0e-47cb-b792-355ca678cfc0 req-109de61b-9173-4b97-a4ee-d439d56bb17c service nova] Acquiring lock "refresh_cache-92b90694-2bb1-431c-b2c0-ad2f229f4a75" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.119016] env[62585]: DEBUG oslo_concurrency.lockutils [req-b72fec87-7d0e-47cb-b792-355ca678cfc0 req-109de61b-9173-4b97-a4ee-d439d56bb17c service nova] Acquired lock "refresh_cache-92b90694-2bb1-431c-b2c0-ad2f229f4a75" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.119016] env[62585]: DEBUG nova.network.neutron [req-b72fec87-7d0e-47cb-b792-355ca678cfc0 req-109de61b-9173-4b97-a4ee-d439d56bb17c service nova] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Refreshing network info cache for port 02d93fe0-638c-43ca-8ed9-c67acc2340c0 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 997.120919] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:fc:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86b8f7fc-c105-4bcb-a4ec-c363ed38b17a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '02d93fe0-638c-43ca-8ed9-c67acc2340c0', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 997.128124] env[62585]: DEBUG oslo.service.loopingcall [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 997.141616] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 997.142138] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385104, 'name': Rename_Task, 'duration_secs': 0.153138} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.142595] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29108753-7ebc-471b-9d80-e521b0550561 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.156980] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 997.158557] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc2cbd41-9f4b-4a07-b87f-e328486967ab {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.163724] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385105, 'name': ReconfigVM_Task, 'duration_secs': 0.288189} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.164269] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Reconfigured VM instance instance-00000065 to attach disk [datastore2] 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08/6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 997.164858] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f805a83a-7c01-4a39-810b-4c12c685a956 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.167762] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 997.167762] env[62585]: value = "task-1385106" [ 997.167762] env[62585]: _type = "Task" [ 997.167762] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.171553] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 997.171553] env[62585]: value = "task-1385107" [ 997.171553] env[62585]: _type = "Task" [ 997.171553] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.173467] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 997.173467] env[62585]: value = "task-1385108" [ 997.173467] env[62585]: _type = "Task" [ 997.173467] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.183443] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385106, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.191176] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385108, 'name': Rename_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.191414] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385107, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.403479] env[62585]: DEBUG nova.network.neutron [req-b72fec87-7d0e-47cb-b792-355ca678cfc0 req-109de61b-9173-4b97-a4ee-d439d56bb17c service nova] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Updated VIF entry in instance network info cache for port 02d93fe0-638c-43ca-8ed9-c67acc2340c0. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 997.403479] env[62585]: DEBUG nova.network.neutron [req-b72fec87-7d0e-47cb-b792-355ca678cfc0 req-109de61b-9173-4b97-a4ee-d439d56bb17c service nova] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Updating instance_info_cache with network_info: [{"id": "02d93fe0-638c-43ca-8ed9-c67acc2340c0", "address": "fa:16:3e:52:fc:53", "network": {"id": "bd25be93-26eb-4d34-b141-5264c9d0539a", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1411210261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ca61db1ff3fb4f5cae3dc18e70af8ba7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86b8f7fc-c105-4bcb-a4ec-c363ed38b17a", "external-id": "nsx-vlan-transportzone-830", "segmentation_id": 830, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02d93fe0-63", "ovs_interfaceid": "02d93fe0-638c-43ca-8ed9-c67acc2340c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.620091] env[62585]: INFO nova.compute.manager [-] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Took 1.81 seconds to deallocate network for instance. 
[ 997.643450] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "refresh_cache-0d256aa0-a873-4ff1-8c43-464d8b2d03a8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.643660] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "refresh_cache-0d256aa0-a873-4ff1-8c43-464d8b2d03a8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.643843] env[62585]: DEBUG nova.network.neutron [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 997.680157] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385106, 'name': CreateVM_Task, 'duration_secs': 0.348416} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.683312] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 997.684067] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.684247] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.684598] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 997.688597] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc18c3bc-046e-481c-805d-42b0fd4f6ffb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.690013] env[62585]: DEBUG oslo_vmware.api [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385107, 'name': PowerOnVM_Task, 'duration_secs': 0.512153} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.693406] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 997.693618] env[62585]: INFO nova.compute.manager [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Took 9.48 seconds to spawn the instance on the hypervisor. [ 997.693801] env[62585]: DEBUG nova.compute.manager [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 997.694291] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385108, 'name': Rename_Task, 'duration_secs': 0.154355} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.695300] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e90d61e-f1d6-4f85-953a-7410c54614fd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.697642] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 997.699061] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc9b8844-78f4-43ef-94b2-f515b1a7c522 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.700735] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 997.700735] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52ddd352-a37e-1f07-c93e-e357a8f0711e" [ 997.700735] env[62585]: _type = "Task" [ 997.700735] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.709409] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 997.709409] env[62585]: value = "task-1385109" [ 997.709409] env[62585]: _type = "Task" [ 997.709409] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.719018] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52ddd352-a37e-1f07-c93e-e357a8f0711e, 'name': SearchDatastore_Task, 'duration_secs': 0.012255} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.719018] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.719018] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 997.719018] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.719018] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.719018] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 997.719018] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11fe1ba8-f9a1-4ffa-b9cf-f5c0f67a0ae0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.722886] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385109, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.729351] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 997.729786] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 997.730249] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cec86209-c872-40d8-b45c-ec1c5a8f3acf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.735462] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 997.735462] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526d94ed-d506-28ea-682e-e4c3cb55ec1b" [ 997.735462] env[62585]: _type = "Task" [ 997.735462] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.745029] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526d94ed-d506-28ea-682e-e4c3cb55ec1b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.905520] env[62585]: DEBUG oslo_concurrency.lockutils [req-b72fec87-7d0e-47cb-b792-355ca678cfc0 req-109de61b-9173-4b97-a4ee-d439d56bb17c service nova] Releasing lock "refresh_cache-92b90694-2bb1-431c-b2c0-ad2f229f4a75" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.905813] env[62585]: DEBUG nova.compute.manager [req-b72fec87-7d0e-47cb-b792-355ca678cfc0 req-109de61b-9173-4b97-a4ee-d439d56bb17c service nova] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Received event network-vif-deleted-6f3ba893-9718-4923-9cfb-b44924398357 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 997.905995] env[62585]: INFO nova.compute.manager [req-b72fec87-7d0e-47cb-b792-355ca678cfc0 req-109de61b-9173-4b97-a4ee-d439d56bb17c service nova] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Neutron deleted interface 6f3ba893-9718-4923-9cfb-b44924398357; detaching it from the instance and deleting it from the info cache [ 997.906195] env[62585]: DEBUG nova.network.neutron [req-b72fec87-7d0e-47cb-b792-355ca678cfc0 req-109de61b-9173-4b97-a4ee-d439d56bb17c service nova] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.127422] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.127422] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.130474] env[62585]: DEBUG nova.objects.instance [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lazy-loading 'resources' on Instance uuid 474d033c-5bf2-4b6a-95be-f865e8f5dfc9 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.228909] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385109, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.232486] env[62585]: INFO nova.compute.manager [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Took 17.26 seconds to build instance. 
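The lockutils lines in this stretch (Acquiring lock "compute_resources" ... acquired :: waited 0.000s, and the matching "released" :: held N s entries) come from decorator-style named locks placed around resource-tracker methods. The following is a stand-alone approximation of that pattern using only the standard library; the synchronized helper and the log wording are assumptions for illustration, not oslo_concurrency code.

import functools
import threading
import time

_locks = {}


def synchronized(name):
    """Decorator sketch: serialize callers on a named lock and log timings."""
    lock = _locks.setdefault(name, threading.Lock())

    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            print(f'Acquiring lock "{name}" by "{fn.__name__}"')
            t_wait = time.monotonic()
            lock.acquire()
            print(f'Lock "{name}" acquired by "{fn.__name__}" '
                  f':: waited {time.monotonic() - t_wait:.3f}s')
            t_held = time.monotonic()
            try:
                return fn(*args, **kwargs)
            finally:
                lock.release()
                print(f'Lock "{name}" "released" by "{fn.__name__}" '
                      f':: held {time.monotonic() - t_held:.3f}s')
        return wrapper
    return decorator


@synchronized("compute_resources")
def update_usage():
    time.sleep(0.05)  # stands in for the resource tracker's bookkeeping


if __name__ == "__main__":
    update_usage()
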
[ 998.246732] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]526d94ed-d506-28ea-682e-e4c3cb55ec1b, 'name': SearchDatastore_Task, 'duration_secs': 0.010616} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.250033] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d2da140-2196-41aa-b235-f653d80fc048 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.256781] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 998.256781] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5288ca1d-ddf2-c4e8-9f88-4862cc426967" [ 998.256781] env[62585]: _type = "Task" [ 998.256781] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.264506] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5288ca1d-ddf2-c4e8-9f88-4862cc426967, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.378812] env[62585]: DEBUG nova.network.neutron [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updating instance_info_cache with network_info: [{"id": "2eb59df2-5648-46be-995c-88785a05be2a", "address": "fa:16:3e:6a:e4:c5", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2eb59df2-56", "ovs_interfaceid": "2eb59df2-5648-46be-995c-88785a05be2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.409411] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3fc4f359-5428-4e4c-8450-3602eb6b6005 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.418264] env[62585]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa28457a-786c-4548-9cc8-ddb60365d679 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.444617] env[62585]: DEBUG nova.compute.manager [req-b72fec87-7d0e-47cb-b792-355ca678cfc0 req-109de61b-9173-4b97-a4ee-d439d56bb17c service nova] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Detach interface failed, port_id=6f3ba893-9718-4923-9cfb-b44924398357, reason: Instance 474d033c-5bf2-4b6a-95be-f865e8f5dfc9 could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 998.721651] env[62585]: DEBUG oslo_vmware.api [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385109, 'name': PowerOnVM_Task, 'duration_secs': 0.71299} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.724860] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 998.725094] env[62585]: INFO nova.compute.manager [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Took 6.62 seconds to spawn the instance on the hypervisor. [ 998.725282] env[62585]: DEBUG nova.compute.manager [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 998.726346] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f3141f-a1b8-47d5-b449-310879aab350 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.740540] env[62585]: DEBUG oslo_concurrency.lockutils [None req-188c12d7-79ed-4a39-bfa2-0dd591370f1e tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.773s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.755108] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4913f645-6e5c-4220-bb4c-a1bca4841ac5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.764881] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa581458-50c6-4820-82e6-f0ac55e41894 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.771631] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': 
session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5288ca1d-ddf2-c4e8-9f88-4862cc426967, 'name': SearchDatastore_Task, 'duration_secs': 0.015121} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.773145] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.773573] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 92b90694-2bb1-431c-b2c0-ad2f229f4a75/92b90694-2bb1-431c-b2c0-ad2f229f4a75.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 998.774244] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b53068d9-8da4-4258-8ab2-16a4cc418094 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.803795] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927ec8f0-b658-487d-91b8-cd8df6b87d54 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.812017] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 998.812017] env[62585]: value = "task-1385110" [ 998.812017] env[62585]: _type = "Task" [ 998.812017] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.823086] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5badab-681b-4243-88c2-722c97d097ed {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.830835] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385110, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.840960] env[62585]: DEBUG nova.compute.provider_tree [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 998.882153] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "refresh_cache-0d256aa0-a873-4ff1-8c43-464d8b2d03a8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.046671] env[62585]: DEBUG nova.compute.manager [req-7bdf46c0-668c-4d20-a2cd-0bc84abcf382 req-948d0c8b-8e46-462a-9388-b2ff08995302 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Received event network-changed-7e93d590-92de-4cbe-9262-4085c844ee88 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 999.046920] env[62585]: DEBUG nova.compute.manager [req-7bdf46c0-668c-4d20-a2cd-0bc84abcf382 req-948d0c8b-8e46-462a-9388-b2ff08995302 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Refreshing instance network info cache due to event network-changed-7e93d590-92de-4cbe-9262-4085c844ee88. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 999.047236] env[62585]: DEBUG oslo_concurrency.lockutils [req-7bdf46c0-668c-4d20-a2cd-0bc84abcf382 req-948d0c8b-8e46-462a-9388-b2ff08995302 service nova] Acquiring lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.047383] env[62585]: DEBUG oslo_concurrency.lockutils [req-7bdf46c0-668c-4d20-a2cd-0bc84abcf382 req-948d0c8b-8e46-462a-9388-b2ff08995302 service nova] Acquired lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.047555] env[62585]: DEBUG nova.network.neutron [req-7bdf46c0-668c-4d20-a2cd-0bc84abcf382 req-948d0c8b-8e46-462a-9388-b2ff08995302 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Refreshing network info cache for port 7e93d590-92de-4cbe-9262-4085c844ee88 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 999.255868] env[62585]: INFO nova.compute.manager [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Took 16.56 seconds to build instance. [ 999.322797] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385110, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.344028] env[62585]: DEBUG nova.scheduler.client.report [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 999.758650] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802ddc35-4b3d-41ba-87c2-98aaf8d0655f tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Lock "6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.073s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.823312] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385110, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529063} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.823981] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 92b90694-2bb1-431c-b2c0-ad2f229f4a75/92b90694-2bb1-431c-b2c0-ad2f229f4a75.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 999.823981] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 999.824305] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ecc3f07-8085-4c14-8464-3da75d836070 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.833227] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 999.833227] env[62585]: value = "task-1385111" [ 999.833227] env[62585]: _type = "Task" [ 999.833227] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.837019] env[62585]: DEBUG nova.network.neutron [req-7bdf46c0-668c-4d20-a2cd-0bc84abcf382 req-948d0c8b-8e46-462a-9388-b2ff08995302 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Updated VIF entry in instance network info cache for port 7e93d590-92de-4cbe-9262-4085c844ee88. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 999.837402] env[62585]: DEBUG nova.network.neutron [req-7bdf46c0-668c-4d20-a2cd-0bc84abcf382 req-948d0c8b-8e46-462a-9388-b2ff08995302 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Updating instance_info_cache with network_info: [{"id": "7e93d590-92de-4cbe-9262-4085c844ee88", "address": "fa:16:3e:2a:2c:ee", "network": {"id": "66030331-b20b-4f58-ac7c-9dbd68ceaf6a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-363131979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c32e1b446add43fe92f7db2dd2373f6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e93d590-92", "ovs_interfaceid": "7e93d590-92de-4cbe-9262-4085c844ee88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.845269] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385111, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.851311] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.723s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.870188] env[62585]: INFO nova.scheduler.client.report [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleted allocations for instance 474d033c-5bf2-4b6a-95be-f865e8f5dfc9 [ 1000.250371] env[62585]: INFO nova.compute.manager [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Rebuilding instance [ 1000.291742] env[62585]: DEBUG nova.compute.manager [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1000.292714] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2fa635-7b8c-4397-aab5-25fd3ebcc481 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.301142] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "d8955c26-85d1-481c-b1d2-4879bb52158b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.301463] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "d8955c26-85d1-481c-b1d2-4879bb52158b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.339851] env[62585]: DEBUG oslo_concurrency.lockutils [req-7bdf46c0-668c-4d20-a2cd-0bc84abcf382 req-948d0c8b-8e46-462a-9388-b2ff08995302 service nova] Releasing lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.343897] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385111, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06486} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.344187] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1000.345023] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897cd721-78c2-446a-a24f-d3214b8ba31c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.373587] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 92b90694-2bb1-431c-b2c0-ad2f229f4a75/92b90694-2bb1-431c-b2c0-ad2f229f4a75.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1000.375144] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e54f3e8f-d21b-489f-81a4-88e297fe2dd1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.394032] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d3e4894-91e9-48e2-9c63-be48b9f369e2 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "474d033c-5bf2-4b6a-95be-f865e8f5dfc9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.731s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.398167] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1000.398167] env[62585]: value = "task-1385112" [ 1000.398167] env[62585]: _type = "Task" [ 1000.398167] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.399376] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604e7e1e-554b-44e1-986e-2a740f19e68e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.420709] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updating instance '0d256aa0-a873-4ff1-8c43-464d8b2d03a8' progress to 0 {{(pid=62585) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1000.428231] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385112, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.707893] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.708363] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.806187] env[62585]: DEBUG nova.compute.manager [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1000.809928] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1000.810268] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7336e1cd-29ba-42d2-afb5-860b68e8c499 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.817000] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 1000.817000] env[62585]: value = "task-1385113" [ 1000.817000] env[62585]: _type = "Task" [ 1000.817000] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.830155] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385113, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.908853] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385112, 'name': ReconfigVM_Task, 'duration_secs': 0.410392} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.908853] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 92b90694-2bb1-431c-b2c0-ad2f229f4a75/92b90694-2bb1-431c-b2c0-ad2f229f4a75.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1000.909913] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4612ccbe-dcea-434e-a925-20173d493c0f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.916156] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1000.916156] env[62585]: value = "task-1385114" [ 1000.916156] env[62585]: _type = "Task" [ 1000.916156] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.923880] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385114, 'name': Rename_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.931260] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1000.931260] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d299aca5-e81b-4cbe-af31-37b017c98627 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.937181] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 1000.937181] env[62585]: value = "task-1385115" [ 1000.937181] env[62585]: _type = "Task" [ 1000.937181] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.945797] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385115, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.210638] env[62585]: DEBUG nova.compute.manager [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1001.332270] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385113, 'name': PowerOffVM_Task, 'duration_secs': 0.125432} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.333373] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.333621] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.335143] env[62585]: INFO nova.compute.claims [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1001.337634] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1001.337878] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1001.340787] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a86a79-3a2a-4b38-874b-d0f62d90abc7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.349311] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1001.349978] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8502a5f-4b66-49bb-a80f-a5f724c94d9c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.376131] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Unregistered the VM {{(pid=62585) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1001.376131] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1001.376131] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Deleting the datastore file [datastore2] 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1001.376869] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e93a502e-0aad-4ad4-b158-a22a848cc791 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.384181] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 1001.384181] env[62585]: value = "task-1385117" [ 1001.384181] env[62585]: _type = "Task" [ 1001.384181] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.392498] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385117, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.426289] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385114, 'name': Rename_Task, 'duration_secs': 0.190739} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.426559] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1001.426799] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a4bc99f-0d74-46f0-8744-0843b3f07cba {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.432810] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1001.432810] env[62585]: value = "task-1385118" [ 1001.432810] env[62585]: _type = "Task" [ 1001.432810] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.440115] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385118, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.447470] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385115, 'name': PowerOffVM_Task, 'duration_secs': 0.226527} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.447747] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1001.447964] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updating instance '0d256aa0-a873-4ff1-8c43-464d8b2d03a8' progress to 17 {{(pid=62585) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1001.735234] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.739566] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "167b0fe3-d6e0-4249-90ab-7b1181669828" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.739784] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "167b0fe3-d6e0-4249-90ab-7b1181669828" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.893744] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385117, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12451} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.894128] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1001.894203] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1001.894346] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1001.943825] env[62585]: DEBUG oslo_vmware.api [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385118, 'name': PowerOnVM_Task, 'duration_secs': 0.47066} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.943825] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1001.944055] env[62585]: INFO nova.compute.manager [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Took 7.63 seconds to spawn the instance on the hypervisor. 
[ 1001.944168] env[62585]: DEBUG nova.compute.manager [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1001.945029] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9b683e-4ffb-4c60-8ce7-8873fa40b857 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.955298] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1001.955461] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1001.955577] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1001.956463] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1001.956463] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1001.956463] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1001.956463] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1001.956463] env[62585]: 
DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1001.956723] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1001.956770] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1001.959037] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1001.961972] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67e2ffab-0d67-45d4-a4f6-af200061b069 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.977514] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 1001.977514] env[62585]: value = "task-1385119" [ 1001.977514] env[62585]: _type = "Task" [ 1001.977514] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.986490] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385119, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.242276] env[62585]: DEBUG nova.compute.manager [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Starting instance... {{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1002.479338] env[62585]: INFO nova.compute.manager [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Took 15.42 seconds to build instance. [ 1002.488389] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385119, 'name': ReconfigVM_Task, 'duration_secs': 0.310703} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.491145] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updating instance '0d256aa0-a873-4ff1-8c43-464d8b2d03a8' progress to 33 {{(pid=62585) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1002.506166] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2530b15-888b-4a74-b0aa-57e1534080a1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.515025] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1364803-3ce4-4520-b4e4-161e28dc405b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.546031] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9bc4b71-7a64-400a-870e-95e0cc786538 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.554323] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ccd8fae-5ca3-4c6b-b28b-74280e9d1e23 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.568115] env[62585]: DEBUG nova.compute.provider_tree [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.760955] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.931728] env[62585]: DEBUG nova.virt.hardware [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1002.932056] env[62585]: DEBUG nova.virt.hardware [None 
req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1002.932148] env[62585]: DEBUG nova.virt.hardware [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1002.932341] env[62585]: DEBUG nova.virt.hardware [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1002.932487] env[62585]: DEBUG nova.virt.hardware [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1002.932744] env[62585]: DEBUG nova.virt.hardware [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1002.932981] env[62585]: DEBUG nova.virt.hardware [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1002.933160] env[62585]: DEBUG nova.virt.hardware [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1002.933329] env[62585]: DEBUG nova.virt.hardware [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1002.933496] env[62585]: DEBUG nova.virt.hardware [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1002.933682] env[62585]: DEBUG nova.virt.hardware [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1002.934630] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f49b5a-4f4d-407b-8e11-c6e4a08ad9d3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.943406] env[62585]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc4e020-336f-4ef4-a3eb-811316bb4ea4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.956539] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Instance VIF info [] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1002.961916] env[62585]: DEBUG oslo.service.loopingcall [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1002.962161] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1002.962378] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ca4dfae-5e04-4e6e-93b9-82b8583cda12 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.979378] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1002.979378] env[62585]: value = "task-1385120" [ 1002.979378] env[62585]: _type = "Task" [ 1002.979378] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.982993] env[62585]: DEBUG oslo_concurrency.lockutils [None req-feaf348d-955e-485f-af86-205b7c739f7d tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "92b90694-2bb1-431c-b2c0-ad2f229f4a75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.939s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.990561] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385120, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.997600] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1002.997829] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1002.997987] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1002.998193] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1002.998342] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1002.998490] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1002.998693] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1002.998853] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1002.999031] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Got 1 possible 
topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1002.999305] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1002.999558] env[62585]: DEBUG nova.virt.hardware [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1003.005325] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Reconfiguring VM instance instance-00000062 to detach disk 2000 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1003.005542] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b7c9177-8b07-4b4c-b3a1-b7918a1c4c2c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.025684] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 1003.025684] env[62585]: value = "task-1385121" [ 1003.025684] env[62585]: _type = "Task" [ 1003.025684] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.035493] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385121, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.072095] env[62585]: DEBUG nova.scheduler.client.report [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1003.156283] env[62585]: INFO nova.compute.manager [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Rescuing [ 1003.156564] env[62585]: DEBUG oslo_concurrency.lockutils [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "refresh_cache-92b90694-2bb1-431c-b2c0-ad2f229f4a75" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.156721] env[62585]: DEBUG oslo_concurrency.lockutils [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquired lock "refresh_cache-92b90694-2bb1-431c-b2c0-ad2f229f4a75" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.156891] env[62585]: DEBUG nova.network.neutron [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1003.489813] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385120, 'name': CreateVM_Task, 'duration_secs': 0.447985} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.489984] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1003.490434] env[62585]: DEBUG oslo_concurrency.lockutils [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.490604] env[62585]: DEBUG oslo_concurrency.lockutils [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.490945] env[62585]: DEBUG oslo_concurrency.lockutils [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1003.491221] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdb55bba-2eb2-4fcf-94d3-1a633bf17445 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.495696] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 1003.495696] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f9c73e-2281-dcc8-e9c2-4948a73fc794" [ 1003.495696] env[62585]: _type = "Task" [ 1003.495696] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.503061] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f9c73e-2281-dcc8-e9c2-4948a73fc794, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.535527] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385121, 'name': ReconfigVM_Task, 'duration_secs': 0.198139} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.535906] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Reconfigured VM instance instance-00000062 to detach disk 2000 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1003.536972] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd914811-2d95-4f86-9edd-65f43cfc2d5c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.569653] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 0d256aa0-a873-4ff1-8c43-464d8b2d03a8/0d256aa0-a873-4ff1-8c43-464d8b2d03a8.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1003.569993] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77702898-bae3-4fd2-adc5-f2ec703afd40 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.590599] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.257s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.591315] env[62585]: DEBUG nova.compute.manager [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1003.595050] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.860s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.596795] env[62585]: INFO nova.compute.claims [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1003.607678] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 1003.607678] env[62585]: value = "task-1385122" [ 1003.607678] env[62585]: _type = "Task" [ 1003.607678] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.618163] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385122, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.900058] env[62585]: DEBUG nova.network.neutron [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Updating instance_info_cache with network_info: [{"id": "02d93fe0-638c-43ca-8ed9-c67acc2340c0", "address": "fa:16:3e:52:fc:53", "network": {"id": "bd25be93-26eb-4d34-b141-5264c9d0539a", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1411210261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ca61db1ff3fb4f5cae3dc18e70af8ba7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86b8f7fc-c105-4bcb-a4ec-c363ed38b17a", "external-id": "nsx-vlan-transportzone-830", "segmentation_id": 830, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02d93fe0-63", "ovs_interfaceid": "02d93fe0-638c-43ca-8ed9-c67acc2340c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.006101] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f9c73e-2281-dcc8-e9c2-4948a73fc794, 'name': SearchDatastore_Task, 'duration_secs': 0.025082} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.006415] env[62585]: DEBUG oslo_concurrency.lockutils [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.006562] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1004.006801] env[62585]: DEBUG oslo_concurrency.lockutils [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.006951] env[62585]: DEBUG oslo_concurrency.lockutils [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.007156] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1004.007413] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c28bce24-38a9-4376-a06f-5b618fba0025 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.015320] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1004.015496] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1004.016179] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-115ede2c-4ce2-40c9-89da-350089fd91c0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.020818] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 1004.020818] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]529e2b85-7570-315d-aee0-0d93fe9eef0c" [ 1004.020818] env[62585]: _type = "Task" [ 1004.020818] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.028086] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]529e2b85-7570-315d-aee0-0d93fe9eef0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.104119] env[62585]: DEBUG nova.compute.utils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1004.107424] env[62585]: DEBUG nova.compute.manager [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1004.107591] env[62585]: DEBUG nova.network.neutron [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1004.119020] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385122, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.147818] env[62585]: DEBUG nova.policy [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1808605fbf174f1b847e3f066ba78d87', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c48f11ec1fa84b4a96ef72198fcec3ef', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 1004.400525] env[62585]: DEBUG nova.network.neutron [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Successfully created port: be0d6bab-1253-458c-b3cd-71ed0eb87c2c {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1004.402790] env[62585]: DEBUG oslo_concurrency.lockutils [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Releasing lock "refresh_cache-92b90694-2bb1-431c-b2c0-ad2f229f4a75" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.531752] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]529e2b85-7570-315d-aee0-0d93fe9eef0c, 'name': SearchDatastore_Task, 'duration_secs': 0.010956} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.532599] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-491efb2d-0162-45d5-99e7-5acf879288cd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.538014] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 1004.538014] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e1c99d-70a1-d8c9-bb05-cb16f0feaf0c" [ 1004.538014] env[62585]: _type = "Task" [ 1004.538014] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.545795] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e1c99d-70a1-d8c9-bb05-cb16f0feaf0c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.608316] env[62585]: DEBUG nova.compute.manager [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1004.623398] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385122, 'name': ReconfigVM_Task, 'duration_secs': 0.737911} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.623703] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 0d256aa0-a873-4ff1-8c43-464d8b2d03a8/0d256aa0-a873-4ff1-8c43-464d8b2d03a8.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1004.623984] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updating instance '0d256aa0-a873-4ff1-8c43-464d8b2d03a8' progress to 50 {{(pid=62585) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1004.743560] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788aab6f-4ecd-4da3-89e2-e0dea6693f1d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.751322] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df6ece2-7eb2-48b9-bb2c-d93d42ee19da {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.781911] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1592586b-0f3c-4dce-b4e7-ad2bd8535b18 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.789270] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f87619-f037-4839-8636-4380924d50a6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.802366] env[62585]: DEBUG nova.compute.provider_tree [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.936672] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 
92b90694-2bb1-431c-b2c0-ad2f229f4a75] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1004.937095] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6cd8fd23-04fa-4549-bc55-1c55d717f2ed {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.944589] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1004.944589] env[62585]: value = "task-1385123" [ 1004.944589] env[62585]: _type = "Task" [ 1004.944589] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.953588] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385123, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.048896] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52e1c99d-70a1-d8c9-bb05-cb16f0feaf0c, 'name': SearchDatastore_Task, 'duration_secs': 0.009245} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.049283] env[62585]: DEBUG oslo_concurrency.lockutils [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.049432] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08/6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1005.049694] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b48e4a01-265b-4756-8989-629b7216c5bb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.056659] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 1005.056659] env[62585]: value = "task-1385124" [ 1005.056659] env[62585]: _type = "Task" [ 1005.056659] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.065597] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385124, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.133265] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-146ae4ca-4e53-4d32-8b2e-a306e4bef4f1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.152719] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1152fb9c-09aa-4cd4-a51c-502db14b348a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.171528] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updating instance '0d256aa0-a873-4ff1-8c43-464d8b2d03a8' progress to 67 {{(pid=62585) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1005.305412] env[62585]: DEBUG nova.scheduler.client.report [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1005.456271] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385123, 'name': PowerOffVM_Task, 'duration_secs': 0.179461} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.456553] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1005.457742] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f74a82-72a1-4dac-8447-309e51058c37 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.479381] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a88c308-3cdf-43a3-896f-6ae164ce0622 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.511125] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1005.511596] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a393b72f-ed42-4e12-98c7-35d7791aece0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.518725] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1005.518725] env[62585]: value = "task-1385125" [ 1005.518725] env[62585]: _type = "Task" [ 1005.518725] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.529658] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] VM already powered off {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1005.530036] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1005.530337] env[62585]: DEBUG oslo_concurrency.lockutils [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.530578] env[62585]: DEBUG oslo_concurrency.lockutils [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.530787] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1005.531225] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a211047f-5007-4624-877c-c24bd3475af3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.543385] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1005.543636] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1005.544595] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b2c5749-fcc7-44ce-b935-463b29b2410a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.552421] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1005.552421] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a19434-1bf9-d25e-8b44-15449341f783" [ 1005.552421] env[62585]: _type = "Task" [ 1005.552421] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.564341] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a19434-1bf9-d25e-8b44-15449341f783, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.570257] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385124, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.621487] env[62585]: DEBUG nova.compute.manager [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1005.645578] env[62585]: DEBUG nova.virt.hardware [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1005.645925] env[62585]: DEBUG nova.virt.hardware [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1005.645985] env[62585]: DEBUG nova.virt.hardware [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1005.646180] env[62585]: DEBUG nova.virt.hardware [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1005.646331] env[62585]: DEBUG nova.virt.hardware [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1005.646483] env[62585]: DEBUG nova.virt.hardware [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1005.646689] env[62585]: DEBUG nova.virt.hardware [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1005.646979] env[62585]: DEBUG nova.virt.hardware [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1005.647041] env[62585]: DEBUG nova.virt.hardware [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1005.647185] env[62585]: DEBUG nova.virt.hardware [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1005.647360] env[62585]: DEBUG nova.virt.hardware [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1005.648235] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74e72e6-31d3-4220-9c8f-86f80764b35e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.656219] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9a9bea-60de-44af-bfc9-e1799c2b9028 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.732453] env[62585]: DEBUG nova.network.neutron [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Port 2eb59df2-5648-46be-995c-88785a05be2a binding to destination host cpu-1 is already ACTIVE {{(pid=62585) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1005.811525] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.217s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.812077] env[62585]: DEBUG nova.compute.manager [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Start building networks asynchronously for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1005.814994] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.054s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.816739] env[62585]: INFO nova.compute.claims [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1005.820698] env[62585]: DEBUG nova.compute.manager [req-f6b53566-ef43-494b-a796-bb19645935b5 req-e620bbbe-66ed-4507-baed-e49b97e7a419 service nova] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Received event network-vif-plugged-be0d6bab-1253-458c-b3cd-71ed0eb87c2c {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1005.820906] env[62585]: DEBUG oslo_concurrency.lockutils [req-f6b53566-ef43-494b-a796-bb19645935b5 req-e620bbbe-66ed-4507-baed-e49b97e7a419 service nova] Acquiring lock "d8955c26-85d1-481c-b1d2-4879bb52158b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.821120] env[62585]: DEBUG oslo_concurrency.lockutils [req-f6b53566-ef43-494b-a796-bb19645935b5 req-e620bbbe-66ed-4507-baed-e49b97e7a419 service nova] Lock "d8955c26-85d1-481c-b1d2-4879bb52158b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.821326] env[62585]: DEBUG oslo_concurrency.lockutils [req-f6b53566-ef43-494b-a796-bb19645935b5 req-e620bbbe-66ed-4507-baed-e49b97e7a419 service nova] Lock "d8955c26-85d1-481c-b1d2-4879bb52158b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.821475] env[62585]: DEBUG nova.compute.manager [req-f6b53566-ef43-494b-a796-bb19645935b5 req-e620bbbe-66ed-4507-baed-e49b97e7a419 service nova] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] No waiting events found dispatching network-vif-plugged-be0d6bab-1253-458c-b3cd-71ed0eb87c2c {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1005.821649] env[62585]: WARNING nova.compute.manager [req-f6b53566-ef43-494b-a796-bb19645935b5 req-e620bbbe-66ed-4507-baed-e49b97e7a419 service nova] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Received unexpected event network-vif-plugged-be0d6bab-1253-458c-b3cd-71ed0eb87c2c for instance with vm_state building and task_state spawning. 
[ 1005.921586] env[62585]: DEBUG nova.network.neutron [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Successfully updated port: be0d6bab-1253-458c-b3cd-71ed0eb87c2c {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1006.064569] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a19434-1bf9-d25e-8b44-15449341f783, 'name': SearchDatastore_Task, 'duration_secs': 0.009796} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.065725] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22b41bd6-4e0b-4142-9659-a43a8b7ee6e0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.071083] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385124, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515884} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.071640] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08/6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1006.071855] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1006.072095] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e9e1709-08e2-41ad-b2ab-2884683757b3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.074579] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1006.074579] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522ef934-02ec-422a-f0b0-c6e7fc3978b2" [ 1006.074579] env[62585]: _type = "Task" [ 1006.074579] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.079685] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 1006.079685] env[62585]: value = "task-1385126" [ 1006.079685] env[62585]: _type = "Task" [ 1006.079685] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.082914] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522ef934-02ec-422a-f0b0-c6e7fc3978b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.089675] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385126, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.317237] env[62585]: DEBUG nova.compute.utils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1006.318688] env[62585]: DEBUG nova.compute.manager [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1006.319296] env[62585]: DEBUG nova.network.neutron [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1006.366839] env[62585]: DEBUG nova.policy [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1808605fbf174f1b847e3f066ba78d87', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c48f11ec1fa84b4a96ef72198fcec3ef', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 1006.424541] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "refresh_cache-d8955c26-85d1-481c-b1d2-4879bb52158b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.424739] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "refresh_cache-d8955c26-85d1-481c-b1d2-4879bb52158b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.424914] env[62585]: DEBUG nova.network.neutron [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1006.585217] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522ef934-02ec-422a-f0b0-c6e7fc3978b2, 'name': SearchDatastore_Task, 'duration_secs': 0.008707} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.588362] env[62585]: DEBUG oslo_concurrency.lockutils [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.588646] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 92b90694-2bb1-431c-b2c0-ad2f229f4a75/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk. {{(pid=62585) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1006.588900] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9bd505aa-4dee-4ae7-9f7b-6b0c41da2800 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.595770] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385126, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063529} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.596779] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1006.597120] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1006.597120] env[62585]: value = "task-1385127" [ 1006.597120] env[62585]: _type = "Task" [ 1006.597120] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.597767] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21e2f1f-2832-4680-95db-6fac4cdef774 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.607483] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385127, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.622247] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08/6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1006.624535] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-826e96e7-ab4c-49ff-93e1-c2808864552f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.636604] env[62585]: DEBUG nova.network.neutron [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Successfully created port: ce4064ff-0c6e-4bbd-83dd-132713bb289f {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1006.644441] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 1006.644441] env[62585]: value = "task-1385128" [ 1006.644441] env[62585]: _type = "Task" [ 1006.644441] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.653393] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385128, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.755152] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.755386] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.755611] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.822384] env[62585]: DEBUG nova.compute.manager [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1006.958549] env[62585]: DEBUG nova.network.neutron [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1006.975786] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9058a6c4-5899-42ee-a7e4-8eec07128eb3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.988021] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0cee4d-23f7-4af1-8457-b90b6c24cbd2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.025460] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99168cf5-1046-4469-8827-29346d25c1b9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.035716] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923a0608-992f-4d9b-abd2-435e40908496 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.054062] env[62585]: DEBUG nova.compute.provider_tree [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.111061] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385127, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.151102] env[62585]: DEBUG nova.network.neutron [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Updating instance_info_cache with network_info: [{"id": "be0d6bab-1253-458c-b3cd-71ed0eb87c2c", "address": "fa:16:3e:37:e6:f2", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe0d6bab-12", "ovs_interfaceid": "be0d6bab-1253-458c-b3cd-71ed0eb87c2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.156375] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.558135] env[62585]: DEBUG nova.scheduler.client.report [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1007.611910] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385127, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.564582} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.612240] env[62585]: INFO nova.virt.vmwareapi.ds_util [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] 92b90694-2bb1-431c-b2c0-ad2f229f4a75/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk. [ 1007.613061] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d921d399-a955-4f7d-b9c7-9efa4fd6a488 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.637680] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 92b90694-2bb1-431c-b2c0-ad2f229f4a75/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1007.638301] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2062cd3a-79d6-460c-99f3-794031557b0e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.654364] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "refresh_cache-d8955c26-85d1-481c-b1d2-4879bb52158b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.654685] env[62585]: DEBUG nova.compute.manager [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Instance network_info: |[{"id": "be0d6bab-1253-458c-b3cd-71ed0eb87c2c", "address": "fa:16:3e:37:e6:f2", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe0d6bab-12", "ovs_interfaceid": "be0d6bab-1253-458c-b3cd-71ed0eb87c2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1007.655091] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:e6:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35342bcb-8b06-472e-b3c0-43fd3d6c4b30', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be0d6bab-1253-458c-b3cd-71ed0eb87c2c', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1007.662201] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Creating folder: Project (c48f11ec1fa84b4a96ef72198fcec3ef). Parent ref: group-v293962. {{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1007.663816] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d43a693-9b96-48cb-8098-c5af3a82be6b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.665358] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1007.665358] env[62585]: value = "task-1385129" [ 1007.665358] env[62585]: _type = "Task" [ 1007.665358] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.668428] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385128, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.676386] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385129, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.677552] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Created folder: Project (c48f11ec1fa84b4a96ef72198fcec3ef) in parent group-v293962. [ 1007.677740] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Creating folder: Instances. Parent ref: group-v294077. 
{{(pid=62585) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1007.677958] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49b4fe9e-2c93-4ed9-9f33-c71b2f869ce1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.686880] env[62585]: INFO nova.virt.vmwareapi.vm_util [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Created folder: Instances in parent group-v294077. [ 1007.687088] env[62585]: DEBUG oslo.service.loopingcall [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1007.687283] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1007.687480] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-193ddd75-c69d-4a49-b888-b20ddd559c18 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.706258] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1007.706258] env[62585]: value = "task-1385132" [ 1007.706258] env[62585]: _type = "Task" [ 1007.706258] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.713282] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385132, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.788690] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "refresh_cache-0d256aa0-a873-4ff1-8c43-464d8b2d03a8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.788889] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "refresh_cache-0d256aa0-a873-4ff1-8c43-464d8b2d03a8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.789092] env[62585]: DEBUG nova.network.neutron [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1007.832925] env[62585]: DEBUG nova.compute.manager [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1007.845441] env[62585]: DEBUG nova.compute.manager [req-c34d1bb0-d635-498c-8cb3-a9dbcb142aa6 req-f453925e-a1af-4ced-b38a-de1de39e0d5f service nova] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Received event network-changed-be0d6bab-1253-458c-b3cd-71ed0eb87c2c {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1007.845606] env[62585]: DEBUG nova.compute.manager [req-c34d1bb0-d635-498c-8cb3-a9dbcb142aa6 req-f453925e-a1af-4ced-b38a-de1de39e0d5f service nova] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Refreshing instance network info cache due to event network-changed-be0d6bab-1253-458c-b3cd-71ed0eb87c2c. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1007.845821] env[62585]: DEBUG oslo_concurrency.lockutils [req-c34d1bb0-d635-498c-8cb3-a9dbcb142aa6 req-f453925e-a1af-4ced-b38a-de1de39e0d5f service nova] Acquiring lock "refresh_cache-d8955c26-85d1-481c-b1d2-4879bb52158b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.846218] env[62585]: DEBUG oslo_concurrency.lockutils [req-c34d1bb0-d635-498c-8cb3-a9dbcb142aa6 req-f453925e-a1af-4ced-b38a-de1de39e0d5f service nova] Acquired lock "refresh_cache-d8955c26-85d1-481c-b1d2-4879bb52158b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.846218] env[62585]: DEBUG nova.network.neutron [req-c34d1bb0-d635-498c-8cb3-a9dbcb142aa6 req-f453925e-a1af-4ced-b38a-de1de39e0d5f service nova] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Refreshing network info cache for port be0d6bab-1253-458c-b3cd-71ed0eb87c2c {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1007.862694] env[62585]: DEBUG nova.virt.hardware [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1007.862940] env[62585]: DEBUG nova.virt.hardware [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1007.863115] env[62585]: DEBUG nova.virt.hardware [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1007.863304] env[62585]: DEBUG nova.virt.hardware [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1007.863453] env[62585]: DEBUG nova.virt.hardware [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1007.863605] env[62585]: DEBUG nova.virt.hardware [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1007.863810] env[62585]: DEBUG nova.virt.hardware [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1007.863973] env[62585]: DEBUG nova.virt.hardware [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1007.864152] env[62585]: DEBUG nova.virt.hardware [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1007.864316] env[62585]: DEBUG nova.virt.hardware [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1007.864490] env[62585]: DEBUG nova.virt.hardware [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1007.865685] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4300f7-954e-4d35-803b-2497a6ea4ae9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.873418] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead150e1-edde-48a4-8786-3a9d15ea824a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.064235] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad 
tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.064827] env[62585]: DEBUG nova.compute.manager [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1008.160340] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385128, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.177736] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385129, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.215970] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385132, 'name': CreateVM_Task} progress is 25%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.572019] env[62585]: DEBUG nova.compute.utils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1008.572019] env[62585]: DEBUG nova.compute.manager [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Allocating IP information in the background. 
{{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1008.572019] env[62585]: DEBUG nova.network.neutron [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1008.613114] env[62585]: DEBUG nova.policy [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac7d82c678d64fba8373930238d5bb2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8121e0a00494834a580b940d36e0160', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 1008.661127] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385128, 'name': ReconfigVM_Task, 'duration_secs': 1.569457} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.661364] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08/6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1008.662077] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbb17e9b-b19c-4f76-a3c8-a6b76cb7327f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.668241] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 1008.668241] env[62585]: value = "task-1385133" [ 1008.668241] env[62585]: _type = "Task" [ 1008.668241] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.678639] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385133, 'name': Rename_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.681538] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385129, 'name': ReconfigVM_Task, 'duration_secs': 0.806068} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.682161] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 92b90694-2bb1-431c-b2c0-ad2f229f4a75/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1008.683645] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce41cfd8-31c3-4da9-8262-bccd684478a1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.716901] env[62585]: DEBUG nova.network.neutron [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updating instance_info_cache with network_info: [{"id": "2eb59df2-5648-46be-995c-88785a05be2a", "address": "fa:16:3e:6a:e4:c5", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2eb59df2-56", "ovs_interfaceid": "2eb59df2-5648-46be-995c-88785a05be2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.723812] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c039e34-8942-4a78-9746-e99507abb142 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.741288] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385132, 'name': CreateVM_Task, 'duration_secs': 0.750266} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.742569] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1008.743021] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1008.743021] env[62585]: value = "task-1385134" [ 1008.743021] env[62585]: _type = "Task" [ 1008.743021] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.743793] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.745019] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.745019] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1008.745019] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a75394d2-a7c6-4d0c-877a-bd46e892745b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.755092] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385134, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.756358] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1008.756358] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]529fd1ed-8270-ac44-c03b-a391941e22ee" [ 1008.756358] env[62585]: _type = "Task" [ 1008.756358] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.763982] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]529fd1ed-8270-ac44-c03b-a391941e22ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.797061] env[62585]: DEBUG nova.network.neutron [req-c34d1bb0-d635-498c-8cb3-a9dbcb142aa6 req-f453925e-a1af-4ced-b38a-de1de39e0d5f service nova] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Updated VIF entry in instance network info cache for port be0d6bab-1253-458c-b3cd-71ed0eb87c2c. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1008.797061] env[62585]: DEBUG nova.network.neutron [req-c34d1bb0-d635-498c-8cb3-a9dbcb142aa6 req-f453925e-a1af-4ced-b38a-de1de39e0d5f service nova] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Updating instance_info_cache with network_info: [{"id": "be0d6bab-1253-458c-b3cd-71ed0eb87c2c", "address": "fa:16:3e:37:e6:f2", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe0d6bab-12", "ovs_interfaceid": "be0d6bab-1253-458c-b3cd-71ed0eb87c2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.872933] env[62585]: DEBUG nova.network.neutron [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Successfully updated port: ce4064ff-0c6e-4bbd-83dd-132713bb289f {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1008.920821] env[62585]: DEBUG nova.network.neutron [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Successfully created port: 65b547d0-01e3-4d24-82d0-876644700248 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1009.074455] env[62585]: DEBUG nova.compute.manager [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1009.178022] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385133, 'name': Rename_Task, 'duration_secs': 0.128721} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.178281] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1009.178498] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5317c319-f762-4cf2-9d96-2bf416b8175d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.185145] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 1009.185145] env[62585]: value = "task-1385135" [ 1009.185145] env[62585]: _type = "Task" [ 1009.185145] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.194529] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385135, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.236334] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "refresh_cache-0d256aa0-a873-4ff1-8c43-464d8b2d03a8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.253413] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385134, 'name': ReconfigVM_Task, 'duration_secs': 0.137703} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.253715] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1009.253998] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2df2da5-9f21-4c7f-a369-dd7191afeb99 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.260500] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1009.260500] env[62585]: value = "task-1385136" [ 1009.260500] env[62585]: _type = "Task" [ 1009.260500] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.266682] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]529fd1ed-8270-ac44-c03b-a391941e22ee, 'name': SearchDatastore_Task, 'duration_secs': 0.012064} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.267251] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.267482] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1009.267714] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.267861] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.268053] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 
tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1009.268288] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-24fa1ad0-2a1a-468e-a551-6e5a5869fbc3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.272473] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385136, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.290429] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1009.290609] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1009.291310] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d4c1dac-9bdf-4ba1-823b-073fc55af75b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.296566] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1009.296566] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5293cc2f-a5ed-6e9d-f6f2-234bb09415c7" [ 1009.296566] env[62585]: _type = "Task" [ 1009.296566] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.299664] env[62585]: DEBUG oslo_concurrency.lockutils [req-c34d1bb0-d635-498c-8cb3-a9dbcb142aa6 req-f453925e-a1af-4ced-b38a-de1de39e0d5f service nova] Releasing lock "refresh_cache-d8955c26-85d1-481c-b1d2-4879bb52158b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.304541] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5293cc2f-a5ed-6e9d-f6f2-234bb09415c7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.374331] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "refresh_cache-c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.374476] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "refresh_cache-c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.374669] env[62585]: DEBUG nova.network.neutron [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1009.694594] env[62585]: DEBUG oslo_vmware.api [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385135, 'name': PowerOnVM_Task, 'duration_secs': 0.450757} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.694885] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1009.695111] env[62585]: DEBUG nova.compute.manager [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1009.695926] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3191e691-634a-4084-a289-e702dc2e481d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.759977] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f91b57-75f6-4d7e-9810-67e426245c8c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.770796] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385136, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.784289] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c28e8ca-24fa-4fa1-bb6d-27ebbe2a7abf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.791032] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updating instance '0d256aa0-a873-4ff1-8c43-464d8b2d03a8' progress to 83 {{(pid=62585) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1009.806915] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5293cc2f-a5ed-6e9d-f6f2-234bb09415c7, 'name': SearchDatastore_Task, 'duration_secs': 0.007666} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.807656] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83435de3-808c-4159-9cb6-b9027dad7285 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.812568] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1009.812568] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527ff3cf-1a9a-73f0-6654-c1d183b63676" [ 1009.812568] env[62585]: _type = "Task" [ 1009.812568] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.819899] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527ff3cf-1a9a-73f0-6654-c1d183b63676, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.869779] env[62585]: DEBUG nova.compute.manager [req-2eeffad2-a4be-4bf8-8100-4202faa5b9a5 req-15eaa69f-4b0f-40b7-8b48-7a39831b15d7 service nova] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Received event network-vif-plugged-ce4064ff-0c6e-4bbd-83dd-132713bb289f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1009.870062] env[62585]: DEBUG oslo_concurrency.lockutils [req-2eeffad2-a4be-4bf8-8100-4202faa5b9a5 req-15eaa69f-4b0f-40b7-8b48-7a39831b15d7 service nova] Acquiring lock "c31a584a-3dfb-4ec2-8852-e9e27cafcb2d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.870380] env[62585]: DEBUG oslo_concurrency.lockutils [req-2eeffad2-a4be-4bf8-8100-4202faa5b9a5 req-15eaa69f-4b0f-40b7-8b48-7a39831b15d7 service nova] Lock "c31a584a-3dfb-4ec2-8852-e9e27cafcb2d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.870495] env[62585]: DEBUG oslo_concurrency.lockutils [req-2eeffad2-a4be-4bf8-8100-4202faa5b9a5 req-15eaa69f-4b0f-40b7-8b48-7a39831b15d7 service nova] Lock "c31a584a-3dfb-4ec2-8852-e9e27cafcb2d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.870633] env[62585]: DEBUG nova.compute.manager [req-2eeffad2-a4be-4bf8-8100-4202faa5b9a5 req-15eaa69f-4b0f-40b7-8b48-7a39831b15d7 service nova] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] No waiting events found dispatching network-vif-plugged-ce4064ff-0c6e-4bbd-83dd-132713bb289f {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1009.870799] env[62585]: WARNING nova.compute.manager [req-2eeffad2-a4be-4bf8-8100-4202faa5b9a5 req-15eaa69f-4b0f-40b7-8b48-7a39831b15d7 service nova] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Received unexpected event network-vif-plugged-ce4064ff-0c6e-4bbd-83dd-132713bb289f for instance with vm_state building and task_state spawning. [ 1009.871039] env[62585]: DEBUG nova.compute.manager [req-2eeffad2-a4be-4bf8-8100-4202faa5b9a5 req-15eaa69f-4b0f-40b7-8b48-7a39831b15d7 service nova] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Received event network-changed-ce4064ff-0c6e-4bbd-83dd-132713bb289f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1009.871217] env[62585]: DEBUG nova.compute.manager [req-2eeffad2-a4be-4bf8-8100-4202faa5b9a5 req-15eaa69f-4b0f-40b7-8b48-7a39831b15d7 service nova] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Refreshing instance network info cache due to event network-changed-ce4064ff-0c6e-4bbd-83dd-132713bb289f. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1009.871432] env[62585]: DEBUG oslo_concurrency.lockutils [req-2eeffad2-a4be-4bf8-8100-4202faa5b9a5 req-15eaa69f-4b0f-40b7-8b48-7a39831b15d7 service nova] Acquiring lock "refresh_cache-c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.916865] env[62585]: DEBUG nova.network.neutron [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1010.045990] env[62585]: DEBUG nova.network.neutron [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Updating instance_info_cache with network_info: [{"id": "ce4064ff-0c6e-4bbd-83dd-132713bb289f", "address": "fa:16:3e:a5:43:65", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce4064ff-0c", "ovs_interfaceid": "ce4064ff-0c6e-4bbd-83dd-132713bb289f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.084252] env[62585]: DEBUG nova.compute.manager [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Start spawning the instance on the hypervisor. 
{{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1010.108214] env[62585]: DEBUG nova.virt.hardware [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1010.108472] env[62585]: DEBUG nova.virt.hardware [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1010.108637] env[62585]: DEBUG nova.virt.hardware [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1010.108826] env[62585]: DEBUG nova.virt.hardware [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1010.108977] env[62585]: DEBUG nova.virt.hardware [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1010.109143] env[62585]: DEBUG nova.virt.hardware [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1010.109379] env[62585]: DEBUG nova.virt.hardware [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1010.109557] env[62585]: DEBUG nova.virt.hardware [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1010.109728] env[62585]: DEBUG nova.virt.hardware [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 
tempest-ServersTestJSON-1776640796-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1010.109913] env[62585]: DEBUG nova.virt.hardware [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1010.110111] env[62585]: DEBUG nova.virt.hardware [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1010.110968] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf82e85-2145-443c-bfd9-c549a6fb1001 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.118995] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a05615-7006-4137-ba68-cf09267f3308 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.213734] env[62585]: DEBUG oslo_concurrency.lockutils [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.213995] env[62585]: DEBUG oslo_concurrency.lockutils [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.214200] env[62585]: DEBUG nova.objects.instance [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62585) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1010.272554] env[62585]: DEBUG oslo_vmware.api [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385136, 'name': PowerOnVM_Task, 'duration_secs': 0.60241} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.272554] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1010.275523] env[62585]: DEBUG nova.compute.manager [None req-30e0cae6-f237-478a-872a-58a9bd2e38fe tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1010.276344] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8418a8c2-cc1d-431f-8ab0-99fc9239b7f1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.290916] env[62585]: DEBUG nova.compute.manager [req-2123860d-fd1d-46d8-9423-6bad2f4375fc req-403b6f7c-e3a0-4cf8-9fde-eef2b3caefbf service nova] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Received event network-vif-plugged-65b547d0-01e3-4d24-82d0-876644700248 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1010.291143] env[62585]: DEBUG oslo_concurrency.lockutils [req-2123860d-fd1d-46d8-9423-6bad2f4375fc req-403b6f7c-e3a0-4cf8-9fde-eef2b3caefbf service nova] Acquiring lock "167b0fe3-d6e0-4249-90ab-7b1181669828-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.291380] env[62585]: DEBUG oslo_concurrency.lockutils [req-2123860d-fd1d-46d8-9423-6bad2f4375fc req-403b6f7c-e3a0-4cf8-9fde-eef2b3caefbf service nova] Lock "167b0fe3-d6e0-4249-90ab-7b1181669828-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.291606] env[62585]: DEBUG oslo_concurrency.lockutils [req-2123860d-fd1d-46d8-9423-6bad2f4375fc req-403b6f7c-e3a0-4cf8-9fde-eef2b3caefbf service nova] Lock "167b0fe3-d6e0-4249-90ab-7b1181669828-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.291782] env[62585]: DEBUG nova.compute.manager [req-2123860d-fd1d-46d8-9423-6bad2f4375fc req-403b6f7c-e3a0-4cf8-9fde-eef2b3caefbf service nova] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] No waiting events found dispatching network-vif-plugged-65b547d0-01e3-4d24-82d0-876644700248 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1010.291949] env[62585]: WARNING nova.compute.manager [req-2123860d-fd1d-46d8-9423-6bad2f4375fc req-403b6f7c-e3a0-4cf8-9fde-eef2b3caefbf service nova] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Received unexpected event network-vif-plugged-65b547d0-01e3-4d24-82d0-876644700248 for instance with vm_state building and task_state spawning. 
[ 1010.297123] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1010.297365] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3cfc86da-e792-4565-aa54-b4e4969e9581 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.303799] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 1010.303799] env[62585]: value = "task-1385137" [ 1010.303799] env[62585]: _type = "Task" [ 1010.303799] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.311598] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385137, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.322144] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]527ff3cf-1a9a-73f0-6654-c1d183b63676, 'name': SearchDatastore_Task, 'duration_secs': 0.009911} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.322430] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.322718] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] d8955c26-85d1-481c-b1d2-4879bb52158b/d8955c26-85d1-481c-b1d2-4879bb52158b.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1010.322990] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-85a06640-0836-4b2e-a539-4c2cf245d6fc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.328990] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1010.328990] env[62585]: value = "task-1385138" [ 1010.328990] env[62585]: _type = "Task" [ 1010.328990] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.337238] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385138, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.379672] env[62585]: DEBUG nova.network.neutron [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Successfully updated port: 65b547d0-01e3-4d24-82d0-876644700248 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1010.548595] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "refresh_cache-c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.548854] env[62585]: DEBUG nova.compute.manager [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Instance network_info: |[{"id": "ce4064ff-0c6e-4bbd-83dd-132713bb289f", "address": "fa:16:3e:a5:43:65", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce4064ff-0c", "ovs_interfaceid": "ce4064ff-0c6e-4bbd-83dd-132713bb289f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1010.549216] env[62585]: DEBUG oslo_concurrency.lockutils [req-2eeffad2-a4be-4bf8-8100-4202faa5b9a5 req-15eaa69f-4b0f-40b7-8b48-7a39831b15d7 service nova] Acquired lock "refresh_cache-c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.549811] env[62585]: DEBUG nova.network.neutron [req-2eeffad2-a4be-4bf8-8100-4202faa5b9a5 req-15eaa69f-4b0f-40b7-8b48-7a39831b15d7 service nova] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Refreshing network info cache for port ce4064ff-0c6e-4bbd-83dd-132713bb289f {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1010.550727] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:43:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35342bcb-8b06-472e-b3c0-43fd3d6c4b30', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce4064ff-0c6e-4bbd-83dd-132713bb289f', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1010.558763] env[62585]: DEBUG oslo.service.loopingcall [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1010.562531] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1010.563048] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed0cc49d-1197-412d-a1e1-a31146dc604a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.584137] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1010.584137] env[62585]: value = "task-1385139" [ 1010.584137] env[62585]: _type = "Task" [ 1010.584137] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.595012] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385139, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.816163] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385137, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.841112] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385138, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.871629] env[62585]: DEBUG nova.network.neutron [req-2eeffad2-a4be-4bf8-8100-4202faa5b9a5 req-15eaa69f-4b0f-40b7-8b48-7a39831b15d7 service nova] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Updated VIF entry in instance network info cache for port ce4064ff-0c6e-4bbd-83dd-132713bb289f. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1010.872056] env[62585]: DEBUG nova.network.neutron [req-2eeffad2-a4be-4bf8-8100-4202faa5b9a5 req-15eaa69f-4b0f-40b7-8b48-7a39831b15d7 service nova] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Updating instance_info_cache with network_info: [{"id": "ce4064ff-0c6e-4bbd-83dd-132713bb289f", "address": "fa:16:3e:a5:43:65", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce4064ff-0c", "ovs_interfaceid": "ce4064ff-0c6e-4bbd-83dd-132713bb289f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.882129] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "refresh_cache-167b0fe3-d6e0-4249-90ab-7b1181669828" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.882277] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "refresh_cache-167b0fe3-d6e0-4249-90ab-7b1181669828" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.882449] env[62585]: DEBUG nova.network.neutron [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1011.093543] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385139, 'name': CreateVM_Task, 'duration_secs': 0.453553} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.093717] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1011.094432] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.094608] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.094931] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1011.095193] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b895e4f1-dea0-4f11-ab78-094363620279 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.099798] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1011.099798] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5280f08e-b144-8b14-55c6-e0970f6117a1" [ 1011.099798] env[62585]: _type = "Task" [ 1011.099798] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.106942] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5280f08e-b144-8b14-55c6-e0970f6117a1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.126437] env[62585]: INFO nova.compute.manager [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Unrescuing [ 1011.126664] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "refresh_cache-92b90694-2bb1-431c-b2c0-ad2f229f4a75" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.126817] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquired lock "refresh_cache-92b90694-2bb1-431c-b2c0-ad2f229f4a75" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.126982] env[62585]: DEBUG nova.network.neutron [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1011.205271] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquiring lock "6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.205550] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Lock "6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.205767] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquiring lock "6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.205957] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Lock "6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.206144] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Lock "6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.208239] env[62585]: INFO nova.compute.manager [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Terminating instance [ 1011.209895] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquiring lock "refresh_cache-6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.210072] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquired lock "refresh_cache-6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.210242] env[62585]: DEBUG nova.network.neutron [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1011.225608] env[62585]: DEBUG oslo_concurrency.lockutils [None req-12184dd0-afca-4797-b477-f33e8d686fac tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.314471] env[62585]: DEBUG oslo_vmware.api [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385137, 'name': PowerOnVM_Task, 'duration_secs': 0.780456} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.314719] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1011.314900] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfdecfe-26e7-4434-9cb0-444c3b949f54 tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updating instance '0d256aa0-a873-4ff1-8c43-464d8b2d03a8' progress to 100 {{(pid=62585) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1011.337327] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385138, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537922} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.337557] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] d8955c26-85d1-481c-b1d2-4879bb52158b/d8955c26-85d1-481c-b1d2-4879bb52158b.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1011.337767] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1011.337999] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c2b5425a-b8ba-4ed9-8b13-72ac5dd5fde2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.343692] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1011.343692] env[62585]: value = "task-1385140" [ 1011.343692] env[62585]: _type = "Task" [ 1011.343692] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.351420] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385140, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.374701] env[62585]: DEBUG oslo_concurrency.lockutils [req-2eeffad2-a4be-4bf8-8100-4202faa5b9a5 req-15eaa69f-4b0f-40b7-8b48-7a39831b15d7 service nova] Releasing lock "refresh_cache-c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.411661] env[62585]: DEBUG nova.network.neutron [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1011.528176] env[62585]: DEBUG nova.network.neutron [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Updating instance_info_cache with network_info: [{"id": "65b547d0-01e3-4d24-82d0-876644700248", "address": "fa:16:3e:da:99:b6", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65b547d0-01", "ovs_interfaceid": "65b547d0-01e3-4d24-82d0-876644700248", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.610455] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5280f08e-b144-8b14-55c6-e0970f6117a1, 'name': SearchDatastore_Task, 'duration_secs': 0.010221} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.610758] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.610990] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1011.611241] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.611424] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.611602] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1011.611869] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4769ce88-ce43-4219-a20c-578b1b7bde4f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.624177] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1011.624440] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1011.625474] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ec5311a-62b4-4344-8737-d79416ea7a2d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.632386] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1011.632386] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52af2754-4ac5-7d31-c387-dbd02de89f60" [ 1011.632386] env[62585]: _type = "Task" [ 1011.632386] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.640042] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52af2754-4ac5-7d31-c387-dbd02de89f60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.727864] env[62585]: DEBUG nova.network.neutron [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Instance cache missing network info. {{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1011.776293] env[62585]: DEBUG nova.network.neutron [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.809668] env[62585]: DEBUG nova.network.neutron [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Updating instance_info_cache with network_info: [{"id": "02d93fe0-638c-43ca-8ed9-c67acc2340c0", "address": "fa:16:3e:52:fc:53", "network": {"id": "bd25be93-26eb-4d34-b141-5264c9d0539a", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1411210261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ca61db1ff3fb4f5cae3dc18e70af8ba7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86b8f7fc-c105-4bcb-a4ec-c363ed38b17a", "external-id": "nsx-vlan-transportzone-830", "segmentation_id": 830, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02d93fe0-63", "ovs_interfaceid": "02d93fe0-638c-43ca-8ed9-c67acc2340c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.854032] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385140, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080496} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.854354] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1011.855143] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256e2d90-83c0-4ead-8c3f-cf04c5c40a39 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.876592] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] d8955c26-85d1-481c-b1d2-4879bb52158b/d8955c26-85d1-481c-b1d2-4879bb52158b.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1011.876851] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0615afd0-b421-4290-8d6b-8f65e7fef220 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.896778] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1011.896778] env[62585]: value = "task-1385141" [ 1011.896778] env[62585]: _type = "Task" [ 1011.896778] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.907626] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385141, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.031024] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "refresh_cache-167b0fe3-d6e0-4249-90ab-7b1181669828" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.031384] env[62585]: DEBUG nova.compute.manager [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Instance network_info: |[{"id": "65b547d0-01e3-4d24-82d0-876644700248", "address": "fa:16:3e:da:99:b6", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65b547d0-01", "ovs_interfaceid": "65b547d0-01e3-4d24-82d0-876644700248", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1012.031860] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:99:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40859343-2baa-45fd-88e3-ebf8aaed2b19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '65b547d0-01e3-4d24-82d0-876644700248', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1012.040046] env[62585]: DEBUG oslo.service.loopingcall [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1012.040280] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1012.040554] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34e37ed3-940e-4903-8b8b-7bb427d69572 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.061146] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1012.061146] env[62585]: value = "task-1385142" [ 1012.061146] env[62585]: _type = "Task" [ 1012.061146] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.070581] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385142, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.144783] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52af2754-4ac5-7d31-c387-dbd02de89f60, 'name': SearchDatastore_Task, 'duration_secs': 0.044675} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.145601] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5260e25-bedc-4108-9456-7fdd89200c2e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.150530] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1012.150530] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522568df-4448-7fe4-bc61-c66e337a1306" [ 1012.150530] env[62585]: _type = "Task" [ 1012.150530] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.158061] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522568df-4448-7fe4-bc61-c66e337a1306, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.279038] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Releasing lock "refresh_cache-6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.279479] env[62585]: DEBUG nova.compute.manager [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1012.279709] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1012.280637] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd11626-f53a-4024-9ed8-68f20a591de7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.288052] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.288312] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47beffe8-c59b-451b-8571-582e88d90acb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.294563] env[62585]: DEBUG oslo_vmware.api [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 1012.294563] env[62585]: value = "task-1385143" [ 1012.294563] env[62585]: _type = "Task" [ 1012.294563] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.302478] env[62585]: DEBUG oslo_vmware.api [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385143, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.312306] env[62585]: DEBUG oslo_concurrency.lockutils [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Releasing lock "refresh_cache-92b90694-2bb1-431c-b2c0-ad2f229f4a75" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.312957] env[62585]: DEBUG nova.objects.instance [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lazy-loading 'flavor' on Instance uuid 92b90694-2bb1-431c-b2c0-ad2f229f4a75 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.327172] env[62585]: DEBUG nova.compute.manager [req-683aae20-99ea-41a6-8d8f-25412886607c req-0a4224c1-3893-4729-a3ed-790d1dec9644 service nova] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Received event network-changed-65b547d0-01e3-4d24-82d0-876644700248 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1012.327396] env[62585]: DEBUG nova.compute.manager [req-683aae20-99ea-41a6-8d8f-25412886607c req-0a4224c1-3893-4729-a3ed-790d1dec9644 service nova] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Refreshing instance network info cache due to event network-changed-65b547d0-01e3-4d24-82d0-876644700248. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1012.327915] env[62585]: DEBUG oslo_concurrency.lockutils [req-683aae20-99ea-41a6-8d8f-25412886607c req-0a4224c1-3893-4729-a3ed-790d1dec9644 service nova] Acquiring lock "refresh_cache-167b0fe3-d6e0-4249-90ab-7b1181669828" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.328086] env[62585]: DEBUG oslo_concurrency.lockutils [req-683aae20-99ea-41a6-8d8f-25412886607c req-0a4224c1-3893-4729-a3ed-790d1dec9644 service nova] Acquired lock "refresh_cache-167b0fe3-d6e0-4249-90ab-7b1181669828" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.328260] env[62585]: DEBUG nova.network.neutron [req-683aae20-99ea-41a6-8d8f-25412886607c req-0a4224c1-3893-4729-a3ed-790d1dec9644 service nova] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Refreshing network info cache for port 65b547d0-01e3-4d24-82d0-876644700248 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1012.408947] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385141, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.571898] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385142, 'name': CreateVM_Task} progress is 99%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.660920] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]522568df-4448-7fe4-bc61-c66e337a1306, 'name': SearchDatastore_Task, 'duration_secs': 0.014519} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.661401] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.661673] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] c31a584a-3dfb-4ec2-8852-e9e27cafcb2d/c31a584a-3dfb-4ec2-8852-e9e27cafcb2d.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1012.661959] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1169d05a-8316-4758-97d8-87142af295f9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.668498] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1012.668498] env[62585]: value = "task-1385144" [ 1012.668498] env[62585]: _type = "Task" [ 1012.668498] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.675804] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385144, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.804635] env[62585]: DEBUG oslo_vmware.api [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385143, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.819696] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c90a450-695d-405b-8cd8-9d89fbfa6737 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.850084] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.852507] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3ff1994-392c-4e5a-9024-06ad4fbe7e3d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.861107] env[62585]: DEBUG oslo_vmware.api [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1012.861107] env[62585]: value = "task-1385145" [ 1012.861107] env[62585]: _type = "Task" [ 1012.861107] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.872172] env[62585]: DEBUG oslo_vmware.api [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385145, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.910226] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385141, 'name': ReconfigVM_Task, 'duration_secs': 0.534808} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.910529] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Reconfigured VM instance instance-00000067 to attach disk [datastore1] d8955c26-85d1-481c-b1d2-4879bb52158b/d8955c26-85d1-481c-b1d2-4879bb52158b.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1012.911200] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0d663c8-aec7-4a11-98b1-1634c85b122e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.919340] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1012.919340] env[62585]: value = "task-1385146" [ 1012.919340] env[62585]: _type = "Task" [ 1012.919340] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.931019] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385146, 'name': Rename_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.073309] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385142, 'name': CreateVM_Task, 'duration_secs': 0.975661} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.073483] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1013.074290] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.074508] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.074873] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1013.075209] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50c99270-4c3b-449e-8109-9a90242a7ba0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.082165] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 1013.082165] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52bc0dbd-55a7-e997-6141-1b08ef6509f8" [ 1013.082165] env[62585]: _type = "Task" [ 1013.082165] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.093406] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52bc0dbd-55a7-e997-6141-1b08ef6509f8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.094510] env[62585]: DEBUG nova.network.neutron [req-683aae20-99ea-41a6-8d8f-25412886607c req-0a4224c1-3893-4729-a3ed-790d1dec9644 service nova] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Updated VIF entry in instance network info cache for port 65b547d0-01e3-4d24-82d0-876644700248. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1013.094905] env[62585]: DEBUG nova.network.neutron [req-683aae20-99ea-41a6-8d8f-25412886607c req-0a4224c1-3893-4729-a3ed-790d1dec9644 service nova] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Updating instance_info_cache with network_info: [{"id": "65b547d0-01e3-4d24-82d0-876644700248", "address": "fa:16:3e:da:99:b6", "network": {"id": "ad2d9890-dc7e-408f-9efe-57ac216f344e", "bridge": "br-int", "label": "tempest-ServersTestJSON-870952527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8121e0a00494834a580b940d36e0160", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65b547d0-01", "ovs_interfaceid": "65b547d0-01e3-4d24-82d0-876644700248", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.181171] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385144, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.306902] env[62585]: DEBUG oslo_vmware.api [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385143, 'name': PowerOffVM_Task, 'duration_secs': 0.643352} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.307373] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1013.307745] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1013.308194] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23ed3b20-75b7-4ba9-8718-1462213e85d7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.340181] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1013.340394] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1013.340583] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Deleting the datastore file [datastore1] 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1013.340861] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a92fe96-d192-48c8-b808-c93be86e4bd0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.347925] env[62585]: DEBUG oslo_vmware.api [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for the task: (returnval){ [ 1013.347925] env[62585]: value = "task-1385148" [ 1013.347925] env[62585]: _type = "Task" [ 1013.347925] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.356299] env[62585]: DEBUG oslo_vmware.api [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.369765] env[62585]: DEBUG oslo_vmware.api [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385145, 'name': PowerOffVM_Task, 'duration_secs': 0.368413} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.370030] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1013.375258] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Reconfiguring VM instance instance-00000066 to detach disk 2001 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1013.375506] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31aec8a1-f192-48cd-a13f-9f94dd7ce0d2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.388709] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.388931] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.389126] env[62585]: DEBUG nova.compute.manager [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Going to confirm migration 2 {{(pid=62585) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1013.396830] env[62585]: DEBUG oslo_vmware.api [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1013.396830] env[62585]: value = "task-1385149" [ 1013.396830] env[62585]: _type = "Task" [ 1013.396830] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.405598] env[62585]: DEBUG oslo_vmware.api [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385149, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.429270] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385146, 'name': Rename_Task, 'duration_secs': 0.434498} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.429617] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1013.429879] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d818b04-0d02-4d16-baae-49dac4a71d2d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.436425] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1013.436425] env[62585]: value = "task-1385150" [ 1013.436425] env[62585]: _type = "Task" [ 1013.436425] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.445910] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385150, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.594055] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52bc0dbd-55a7-e997-6141-1b08ef6509f8, 'name': SearchDatastore_Task, 'duration_secs': 0.054138} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.594055] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1013.594273] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1013.594458] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.594609] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.594789] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1013.595101] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-221c58bf-82f2-4b0f-add1-d9db17270392 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.597328] env[62585]: DEBUG oslo_concurrency.lockutils [req-683aae20-99ea-41a6-8d8f-25412886607c req-0a4224c1-3893-4729-a3ed-790d1dec9644 service nova] Releasing lock "refresh_cache-167b0fe3-d6e0-4249-90ab-7b1181669828" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1013.605191] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1013.605341] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1013.606259] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8536c75-cc4c-4c1f-973c-fd138e697d69 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.612629] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 1013.612629] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52133028-b7bb-0d78-5f5f-21834205a827" [ 1013.612629] env[62585]: _type = "Task" [ 1013.612629] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.621420] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52133028-b7bb-0d78-5f5f-21834205a827, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.680167] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385144, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600404} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.680879] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] c31a584a-3dfb-4ec2-8852-e9e27cafcb2d/c31a584a-3dfb-4ec2-8852-e9e27cafcb2d.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1013.680879] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1013.681063] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bac5d864-2e3e-479b-a80b-1cc35ddbb950 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.689208] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1013.689208] env[62585]: value = "task-1385151" [ 1013.689208] env[62585]: _type = "Task" [ 1013.689208] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.699695] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385151, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.857685] env[62585]: DEBUG oslo_vmware.api [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Task: {'id': task-1385148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186698} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.857887] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1013.858091] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1013.858278] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1013.858454] env[62585]: INFO nova.compute.manager [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Took 1.58 seconds to destroy the instance on the hypervisor. [ 1013.858707] env[62585]: DEBUG oslo.service.loopingcall [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1013.858905] env[62585]: DEBUG nova.compute.manager [-] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1013.859009] env[62585]: DEBUG nova.network.neutron [-] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1013.876598] env[62585]: DEBUG nova.network.neutron [-] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1013.907221] env[62585]: DEBUG oslo_vmware.api [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385149, 'name': ReconfigVM_Task, 'duration_secs': 0.348893} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.907495] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Reconfigured VM instance instance-00000066 to detach disk 2001 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1013.907689] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1013.907934] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06f65de6-b564-45b1-be28-59d6b8a7061a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.916320] env[62585]: DEBUG oslo_vmware.api [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1013.916320] env[62585]: value = "task-1385152" [ 1013.916320] env[62585]: _type = "Task" [ 1013.916320] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.925517] env[62585]: DEBUG oslo_vmware.api [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385152, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.947573] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385150, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.948963] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "refresh_cache-0d256aa0-a873-4ff1-8c43-464d8b2d03a8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.949185] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquired lock "refresh_cache-0d256aa0-a873-4ff1-8c43-464d8b2d03a8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.949388] env[62585]: DEBUG nova.network.neutron [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1013.949610] env[62585]: DEBUG nova.objects.instance [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lazy-loading 'info_cache' on Instance uuid 0d256aa0-a873-4ff1-8c43-464d8b2d03a8 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.124538] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52133028-b7bb-0d78-5f5f-21834205a827, 'name': SearchDatastore_Task, 'duration_secs': 0.011305} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.125295] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adae0bc8-6c58-458c-8cc8-ff28ea1347b7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.130785] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 1014.130785] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5284244d-8428-65f1-ab6c-a9953d38f4ca" [ 1014.130785] env[62585]: _type = "Task" [ 1014.130785] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.138941] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5284244d-8428-65f1-ab6c-a9953d38f4ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.199392] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385151, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073163} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.199686] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1014.200489] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7347203-dd59-4363-bbfc-0586c60b5276 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.222430] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] c31a584a-3dfb-4ec2-8852-e9e27cafcb2d/c31a584a-3dfb-4ec2-8852-e9e27cafcb2d.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1014.223069] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fd31d05-e86d-4c67-a7ea-1a7241c2a5f8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.242712] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1014.242712] env[62585]: value = "task-1385153" [ 1014.242712] env[62585]: _type = "Task" [ 1014.242712] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.252317] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385153, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.379087] env[62585]: DEBUG nova.network.neutron [-] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.427518] env[62585]: DEBUG oslo_vmware.api [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385152, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.447811] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385150, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.641552] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5284244d-8428-65f1-ab6c-a9953d38f4ca, 'name': SearchDatastore_Task, 'duration_secs': 0.045107} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.641833] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.642169] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 167b0fe3-d6e0-4249-90ab-7b1181669828/167b0fe3-d6e0-4249-90ab-7b1181669828.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1014.642466] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c8d64cf-7730-41b6-94ca-36917f87ad92 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.649069] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 1014.649069] env[62585]: value = "task-1385154" [ 1014.649069] env[62585]: _type = "Task" [ 1014.649069] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.656682] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385154, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.753836] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385153, 'name': ReconfigVM_Task, 'duration_secs': 0.370192} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.754146] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Reconfigured VM instance instance-00000068 to attach disk [datastore1] c31a584a-3dfb-4ec2-8852-e9e27cafcb2d/c31a584a-3dfb-4ec2-8852-e9e27cafcb2d.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1014.754802] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d3aeccc-90db-4366-a644-cf4d3f41003e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.762054] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1014.762054] env[62585]: value = "task-1385155" [ 1014.762054] env[62585]: _type = "Task" [ 1014.762054] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.771599] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385155, 'name': Rename_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.882185] env[62585]: INFO nova.compute.manager [-] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Took 1.02 seconds to deallocate network for instance. [ 1014.928930] env[62585]: DEBUG oslo_vmware.api [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385152, 'name': PowerOnVM_Task, 'duration_secs': 0.639435} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.929170] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1014.929480] env[62585]: DEBUG nova.compute.manager [None req-59be32de-d68a-4906-bd32-5bb3e1f282c7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1014.930226] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72707dd1-d25f-496b-af7f-87a8c10a192a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.946873] env[62585]: DEBUG oslo_vmware.api [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385150, 'name': PowerOnVM_Task, 'duration_secs': 1.040822} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.947472] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1014.947472] env[62585]: INFO nova.compute.manager [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Took 9.33 seconds to spawn the instance on the hypervisor. [ 1014.947608] env[62585]: DEBUG nova.compute.manager [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1014.948392] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6deffa2-05a8-4c45-9553-5d9a881038b4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.159715] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385154, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.160744] env[62585]: DEBUG nova.network.neutron [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updating instance_info_cache with network_info: [{"id": "2eb59df2-5648-46be-995c-88785a05be2a", "address": "fa:16:3e:6a:e4:c5", "network": {"id": "94d2a72d-60c8-4c57-b801-6ab7db25b80b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1062805711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c49ab537d42244f495aaa3cbdaafc6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2eb59df2-56", "ovs_interfaceid": "2eb59df2-5648-46be-995c-88785a05be2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.274410] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385155, 'name': Rename_Task, 'duration_secs': 0.165464} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.274709] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1015.275044] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d1befb4-0235-4199-ac14-91dac55c20ef {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.284063] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1015.284063] env[62585]: value = "task-1385156" [ 1015.284063] env[62585]: _type = "Task" [ 1015.284063] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.293764] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385156, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.390246] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.390659] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.392316] env[62585]: DEBUG nova.objects.instance [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Lazy-loading 'resources' on Instance uuid 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.468910] env[62585]: INFO nova.compute.manager [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Took 14.16 seconds to build instance. [ 1015.660949] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385154, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.891955} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.661264] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 167b0fe3-d6e0-4249-90ab-7b1181669828/167b0fe3-d6e0-4249-90ab-7b1181669828.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1015.661555] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1015.661846] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dac40eb8-5f90-4144-9042-b01e3a22e9e8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.664120] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Releasing lock "refresh_cache-0d256aa0-a873-4ff1-8c43-464d8b2d03a8" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.664356] env[62585]: DEBUG nova.objects.instance [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lazy-loading 'migration_context' on Instance uuid 0d256aa0-a873-4ff1-8c43-464d8b2d03a8 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.671587] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 1015.671587] env[62585]: value = "task-1385157" [ 1015.671587] env[62585]: _type = "Task" [ 1015.671587] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.682221] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385157, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.796176] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385156, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.972713] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bde336a3-3d24-444f-8320-c5e902aa0ef5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "d8955c26-85d1-481c-b1d2-4879bb52158b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.671s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.009815] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964db509-c503-48e3-bea9-279408612fe9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.018056] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2a2ac1-2c9d-46b8-a104-b7d17a1f83a5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.049786] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba080ac4-5a8d-486e-aec2-202ba1615be8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.057967] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c6b193-a776-4843-9460-1496b9d31673 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.071539] env[62585]: DEBUG nova.compute.provider_tree [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.083840] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "92b90694-2bb1-431c-b2c0-ad2f229f4a75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.084120] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "92b90694-2bb1-431c-b2c0-ad2f229f4a75" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.084333] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "92b90694-2bb1-431c-b2c0-ad2f229f4a75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.084520] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e 
tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "92b90694-2bb1-431c-b2c0-ad2f229f4a75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.084692] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "92b90694-2bb1-431c-b2c0-ad2f229f4a75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.086508] env[62585]: INFO nova.compute.manager [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Terminating instance [ 1016.088275] env[62585]: DEBUG nova.compute.manager [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1016.088472] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1016.089215] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f1ad31-1c33-41ea-9f45-36ab789b9ffc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.105637] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1016.105873] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13154cd7-251c-4d39-a96e-7dfcd5c67a70 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.112333] env[62585]: DEBUG oslo_vmware.api [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1016.112333] env[62585]: value = "task-1385158" [ 1016.112333] env[62585]: _type = "Task" [ 1016.112333] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.120016] env[62585]: DEBUG oslo_vmware.api [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385158, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.167491] env[62585]: DEBUG nova.objects.base [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Object Instance<0d256aa0-a873-4ff1-8c43-464d8b2d03a8> lazy-loaded attributes: info_cache,migration_context {{(pid=62585) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1016.168470] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746d4f2c-a752-4667-86e2-fce92155865d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.189615] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a383ef1-b194-484e-8cc9-a71a3565a588 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.194449] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385157, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077679} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.195112] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1016.195904] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b145bdd-06eb-4352-b9fe-da14615d176e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.201560] env[62585]: DEBUG oslo_vmware.api [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 1016.201560] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52363ec3-67ad-395d-d37d-5567fc661955" [ 1016.201560] env[62585]: _type = "Task" [ 1016.201560] env[62585]: } to complete. 
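The "Waiting for the task ... to complete", "progress is N%" and "completed successfully" records that recur through this section are oslo.vmware's task-polling loop at work: the driver invokes an asynchronous vSphere method, gets back a Task managed object reference, and polls it until it finishes. A minimal sketch of that pattern against the public oslo.vmware session API (the host, credentials and VM lookup below are placeholders, not values from this deployment):

# Sketch of the invoke/poll pattern behind the *_Task records in this log.
# Assumes oslo.vmware is installed and a vCenter is reachable; the host,
# credentials and the "first VM" lookup are placeholders for illustration.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vc.example.test', 'user@example.test', 'secret',
    api_retry_count=10, task_poll_interval=0.5)  # poll interval drives the "progress is N%" lines

# Pick some VM managed object reference; here simply the first VM returned.
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100)
vm_ref = result.objects[0].obj

# Invoking VirtualMachine.PowerOffVM_Task returns a Task moref immediately...
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
# ...and wait_for_task() polls it (the _poll_task records) until it succeeds
# or raises on error.
session.wait_for_task(task)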
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.219200] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 167b0fe3-d6e0-4249-90ab-7b1181669828/167b0fe3-d6e0-4249-90ab-7b1181669828.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1016.222460] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18d91da9-6bee-4347-a502-67ddadef22a1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.242265] env[62585]: DEBUG oslo_vmware.api [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52363ec3-67ad-395d-d37d-5567fc661955, 'name': SearchDatastore_Task, 'duration_secs': 0.014255} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.243538] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.243874] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 1016.243874] env[62585]: value = "task-1385159" [ 1016.243874] env[62585]: _type = "Task" [ 1016.243874] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.251401] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385159, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.294987] env[62585]: DEBUG oslo_vmware.api [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385156, 'name': PowerOnVM_Task, 'duration_secs': 0.544935} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.295230] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1016.295443] env[62585]: INFO nova.compute.manager [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1016.295719] env[62585]: DEBUG nova.compute.manager [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1016.296497] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e420f56-2722-444e-938a-beee61a322b8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.575112] env[62585]: DEBUG nova.scheduler.client.report [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1016.623063] env[62585]: DEBUG oslo_vmware.api [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385158, 'name': PowerOffVM_Task, 'duration_secs': 0.358088} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.623429] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1016.623595] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1016.623858] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0ddf002-7c79-4948-be15-fe17713539d0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.720390] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1016.720390] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1016.720390] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Deleting the datastore file [datastore2] 92b90694-2bb1-431c-b2c0-ad2f229f4a75 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1016.720679] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c521e788-1985-42d6-9f45-fc13324efe2d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.728252] env[62585]: DEBUG oslo_vmware.api [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1016.728252] env[62585]: value = "task-1385161" [ 1016.728252] env[62585]: _type = "Task" [ 1016.728252] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.736810] env[62585]: DEBUG oslo_vmware.api [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385161, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.754489] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385159, 'name': ReconfigVM_Task} progress is 14%. 
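The sequence just logged for instance 92b90694-2bb1-431c-b2c0-ad2f229f4a75 (PowerOffVM_Task, then UnregisterVM, then FileManager.DeleteDatastoreFile_Task on the instance directory) is the driver's basic destroy path: stop the VM, drop it from the vCenter inventory, then remove its files from the datastore. A rough sketch of those three calls, assuming a session object like the one in the previous snippet and placeholder datacenter/path arguments:

# Sketch of the power-off -> unregister -> delete-files sequence logged above.
# session, vm_ref, datacenter_ref and ds_path are assumed inputs, not values
# taken from this log.
def destroy_vm(session, vm_ref, datacenter_ref, ds_path):
    # PowerOffVM_Task is asynchronous, so poll it like any other task.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM removes the VM from inventory but leaves its files behind;
    # it is a synchronous call, hence no task to wait for.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # FileManager.DeleteDatastoreFile_Task then deletes the instance directory
    # (e.g. "[datastore2] <instance-uuid>") recursively.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path,
                              datacenter=datacenter_ref)
    session.wait_for_task(task)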
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.816129] env[62585]: INFO nova.compute.manager [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Took 15.10 seconds to build instance. [ 1017.080792] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.690s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.083269] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.840s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.105195] env[62585]: INFO nova.scheduler.client.report [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Deleted allocations for instance 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08 [ 1017.239701] env[62585]: DEBUG oslo_vmware.api [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385161, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.254573] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385159, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.318938] env[62585]: DEBUG oslo_concurrency.lockutils [None req-80762a7f-9616-4bde-af75-afa57d012d06 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.610s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.453354] env[62585]: INFO nova.compute.manager [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Rescuing [ 1017.453717] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "refresh_cache-c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.453925] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "refresh_cache-c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.454162] env[62585]: DEBUG nova.network.neutron [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1017.612293] env[62585]: DEBUG oslo_concurrency.lockutils [None req-fff0ec60-493b-4a96-801c-06af59757e82 tempest-ServerShowV254Test-802977328 tempest-ServerShowV254Test-802977328-project-member] Lock "6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.407s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.688195] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c84a1d-73bd-4737-9baf-876bcd2491e1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.696613] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d03569-53cf-4d21-89e9-a3cc029a0fe4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.729653] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec2afd3-2a25-436a-82e9-80dec96b15b6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.740758] env[62585]: DEBUG oslo_vmware.api [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385161, 'name': DeleteDatastoreFile_Task, 
'duration_secs': 0.517967} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.742921] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1017.743144] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1017.743333] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1017.743518] env[62585]: INFO nova.compute.manager [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1017.743766] env[62585]: DEBUG oslo.service.loopingcall [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1017.744029] env[62585]: DEBUG nova.compute.manager [-] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1017.744128] env[62585]: DEBUG nova.network.neutron [-] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1017.746614] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7980ceea-097d-413f-99da-c7efd3401e56 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.763952] env[62585]: DEBUG nova.compute.provider_tree [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1017.768820] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385159, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.027987] env[62585]: DEBUG nova.compute.manager [req-542255fa-81b7-46dd-b0fa-d39d2cf0c2b3 req-7a79473f-bc4c-4677-91e7-a4000b7afeaf service nova] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Received event network-vif-deleted-02d93fe0-638c-43ca-8ed9-c67acc2340c0 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1018.028242] env[62585]: INFO nova.compute.manager [req-542255fa-81b7-46dd-b0fa-d39d2cf0c2b3 req-7a79473f-bc4c-4677-91e7-a4000b7afeaf service nova] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Neutron deleted interface 02d93fe0-638c-43ca-8ed9-c67acc2340c0; detaching it from the instance and deleting it from the info cache [ 1018.028418] env[62585]: DEBUG nova.network.neutron [req-542255fa-81b7-46dd-b0fa-d39d2cf0c2b3 req-7a79473f-bc4c-4677-91e7-a4000b7afeaf service nova] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.213463] env[62585]: DEBUG nova.network.neutron [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Updating instance_info_cache with network_info: [{"id": "ce4064ff-0c6e-4bbd-83dd-132713bb289f", "address": "fa:16:3e:a5:43:65", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce4064ff-0c", "ovs_interfaceid": "ce4064ff-0c6e-4bbd-83dd-132713bb289f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.263092] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385159, 'name': ReconfigVM_Task, 'duration_secs': 1.967755} completed successfully. 
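The network_info payloads in the instance_info_cache updates above are lists of VIF dictionaries (an empty list once Neutron has deleted the port, a full entry for the port that is kept). A small self-contained example of reading the usual fields out of one such entry, using a trimmed copy of the structure shown above rather than the live nova.network.model object:

# Sketch: reading a cached network_info entry like the one logged above.
# The dict is a trimmed copy of the logged structure, not live data.
vif = {
    "id": "ce4064ff-0c6e-4bbd-83dd-132713bb289f",
    "address": "fa:16:3e:a5:43:65",
    "type": "ovs",
    "devname": "tapce4064ff-0c",
    "network": {
        "id": "297bb16b-ed70-4c91-adf6-de95678b32a3",
        "bridge": "br-int",
        "subnets": [
            {"cidr": "192.168.128.0/28",
             "gateway": {"address": "192.168.128.1"},
             "ips": [{"address": "192.168.128.7", "type": "fixed"}]},
        ],
    },
}

# Collect the fixed IPs across all subnets of the VIF's network.
fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]
             if ip["type"] == "fixed"]
print(vif["id"], vif["address"], fixed_ips)  # port UUID, MAC, ['192.168.128.7']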
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.263405] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 167b0fe3-d6e0-4249-90ab-7b1181669828/167b0fe3-d6e0-4249-90ab-7b1181669828.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1018.264070] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-541b8c5a-4cae-4e57-95bd-ac67fb7a0144 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.270764] env[62585]: DEBUG nova.scheduler.client.report [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1018.275221] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 1018.275221] env[62585]: value = "task-1385162" [ 1018.275221] env[62585]: _type = "Task" [ 1018.275221] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.284306] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385162, 'name': Rename_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.490647] env[62585]: DEBUG nova.network.neutron [-] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.530958] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9957a91f-ac1f-43bb-92fc-d4cc1f916f5f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.541369] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd22799-342f-4d1a-8e48-e905b34e3bef {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.571685] env[62585]: DEBUG nova.compute.manager [req-542255fa-81b7-46dd-b0fa-d39d2cf0c2b3 req-7a79473f-bc4c-4677-91e7-a4000b7afeaf service nova] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Detach interface failed, port_id=02d93fe0-638c-43ca-8ed9-c67acc2340c0, reason: Instance 92b90694-2bb1-431c-b2c0-ad2f229f4a75 could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1018.716445] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "refresh_cache-c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.790309] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385162, 'name': Rename_Task, 'duration_secs': 0.184672} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.790679] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1018.790981] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5184ae56-9c66-4b0f-b95c-2f06e9374401 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.798389] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 1018.798389] env[62585]: value = "task-1385163" [ 1018.798389] env[62585]: _type = "Task" [ 1018.798389] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.807536] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385163, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.993808] env[62585]: INFO nova.compute.manager [-] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Took 1.25 seconds to deallocate network for instance. [ 1019.248102] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1019.248426] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84cbffcc-1dba-472a-ae01-15de3461dc1d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.257421] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1019.257421] env[62585]: value = "task-1385164" [ 1019.257421] env[62585]: _type = "Task" [ 1019.257421] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.266814] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385164, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.285365] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.202s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.308853] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385163, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.499972] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.500296] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.500528] env[62585]: DEBUG nova.objects.instance [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lazy-loading 'resources' on Instance uuid 92b90694-2bb1-431c-b2c0-ad2f229f4a75 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.767712] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385164, 'name': PowerOffVM_Task, 'duration_secs': 0.197544} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.768194] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1019.768825] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f27f8d-c8b0-4919-b606-227f3883b13b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.787963] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2447cd85-8880-4887-9532-7859d08fff97 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.808350] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385163, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.817991] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1019.818302] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5fa71a3d-0da8-4fdb-a627-0d47cf620a67 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.825864] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1019.825864] env[62585]: value = "task-1385165" [ 1019.825864] env[62585]: _type = "Task" [ 1019.825864] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.834126] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385165, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.847640] env[62585]: INFO nova.scheduler.client.report [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Deleted allocation for migration 39ea0e61-87e1-4e55-843c-715aed911150 [ 1020.100284] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b53a94-fcf6-490c-afe9-e5c765974af6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.108893] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed16c031-e8bf-4add-91de-ede1332f5307 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.139323] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70be4438-87e0-4737-a257-d5648b678dd6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.148208] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bdf346-d1f7-4f81-91cd-adac2c0a7819 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.162237] env[62585]: DEBUG nova.compute.provider_tree [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.310411] env[62585]: DEBUG oslo_vmware.api [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385163, 'name': 
PowerOnVM_Task, 'duration_secs': 1.151178} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.310697] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1020.310909] env[62585]: INFO nova.compute.manager [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Took 10.23 seconds to spawn the instance on the hypervisor. [ 1020.311110] env[62585]: DEBUG nova.compute.manager [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1020.311960] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f875b276-156f-4870-9267-bd2d5ce8998e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.339139] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] VM already powered off {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1020.339448] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1020.339754] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.339909] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.340759] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1020.340759] env[62585]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-3a20dacc-c246-4d4c-84aa-9c7e5f67b681 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.351074] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1020.351326] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1020.352900] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.964s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.354016] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d27ff3a1-b30b-4a0d-b054-7ddbc28ffcb9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.361759] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1020.361759] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c2915c-effd-81f5-bac0-c3ea9807031e" [ 1020.361759] env[62585]: _type = "Task" [ 1020.361759] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.371893] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c2915c-effd-81f5-bac0-c3ea9807031e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.666353] env[62585]: DEBUG nova.scheduler.client.report [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1020.830911] env[62585]: INFO nova.compute.manager [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Took 18.08 seconds to build instance. [ 1020.874549] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c2915c-effd-81f5-bac0-c3ea9807031e, 'name': SearchDatastore_Task, 'duration_secs': 0.012335} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.875964] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-698471a2-0088-467f-ae1b-0489f928b571 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.882738] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1020.882738] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c11c2d-d8de-4abc-58f2-c5ff3e03971b" [ 1020.882738] env[62585]: _type = "Task" [ 1020.882738] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.892326] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c11c2d-d8de-4abc-58f2-c5ff3e03971b, 'name': SearchDatastore_Task} progress is 0%. 
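The inventory dict reported for provider 66db9ec1-b5c3-45d2-a885-8e338110656b is what the resource tracker hands to Placement; for scheduling purposes the usable capacity of each resource class is (total - reserved) * allocation_ratio, with min_unit, max_unit and step_size constraining individual allocations. A short worked check against the logged values:

# Worked check of the inventory dict logged above (trimmed to the fields used).
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity = {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400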
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.174337] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.674s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.191543] env[62585]: INFO nova.scheduler.client.report [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Deleted allocations for instance 92b90694-2bb1-431c-b2c0-ad2f229f4a75 [ 1021.332426] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d8a8f465-4554-47b5-8630-e229c3e84dad tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "167b0fe3-d6e0-4249-90ab-7b1181669828" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.592s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.395049] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52c11c2d-d8de-4abc-58f2-c5ff3e03971b, 'name': SearchDatastore_Task, 'duration_secs': 0.015494} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.395217] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.395339] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] c31a584a-3dfb-4ec2-8852-e9e27cafcb2d/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk. {{(pid=62585) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1021.395624] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-446d2981-9f17-4693-8660-f20c656891f8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.404271] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1021.404271] env[62585]: value = "task-1385166" [ 1021.404271] env[62585]: _type = "Task" [ 1021.404271] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.408590] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.408844] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.409076] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.409270] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.409442] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.412042] env[62585]: INFO nova.compute.manager [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Terminating instance [ 1021.414466] env[62585]: DEBUG nova.compute.manager [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1021.414676] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1021.415499] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d9b364-c75e-464f-a381-9bd4e256d8fc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.421380] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385166, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.427015] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1021.427263] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f34fdbd-bd8b-4272-b7ee-4285380f7ad5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.434747] env[62585]: DEBUG oslo_vmware.api [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 1021.434747] env[62585]: value = "task-1385167" [ 1021.434747] env[62585]: _type = "Task" [ 1021.434747] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.446646] env[62585]: DEBUG oslo_vmware.api [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385167, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.700559] env[62585]: DEBUG oslo_concurrency.lockutils [None req-dad1ca28-e34a-4fd2-8dd5-cd492fc8b44e tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "92b90694-2bb1-431c-b2c0-ad2f229f4a75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.616s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.917535] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385166, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.945833] env[62585]: DEBUG oslo_vmware.api [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385167, 'name': PowerOffVM_Task, 'duration_secs': 0.338535} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.946157] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1021.946351] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1021.946651] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e2cf087-06d1-427b-98b6-5f2356e0ac1f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.984690] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1995b4a3-f513-408e-a09c-78c1829aeab6 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "167b0fe3-d6e0-4249-90ab-7b1181669828" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.984935] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1995b4a3-f513-408e-a09c-78c1829aeab6 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "167b0fe3-d6e0-4249-90ab-7b1181669828" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.985280] env[62585]: DEBUG nova.compute.manager [None req-1995b4a3-f513-408e-a09c-78c1829aeab6 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1021.986423] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b2f26e-75bf-4a51-a7c6-06eff90f55f3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.995014] env[62585]: DEBUG nova.compute.manager [None req-1995b4a3-f513-408e-a09c-78c1829aeab6 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62585) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1021.995775] env[62585]: DEBUG nova.objects.instance [None req-1995b4a3-f513-408e-a09c-78c1829aeab6 tempest-ServersTestJSON-1776640796 
tempest-ServersTestJSON-1776640796-project-member] Lazy-loading 'flavor' on Instance uuid 167b0fe3-d6e0-4249-90ab-7b1181669828 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1022.068974] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1022.069730] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1022.069730] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Deleting the datastore file [datastore1] 0d256aa0-a873-4ff1-8c43-464d8b2d03a8 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1022.070039] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fbcef11b-12ca-4d97-8498-c72eee92994d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.078980] env[62585]: DEBUG oslo_vmware.api [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for the task: (returnval){ [ 1022.078980] env[62585]: value = "task-1385169" [ 1022.078980] env[62585]: _type = "Task" [ 1022.078980] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.087885] env[62585]: DEBUG oslo_vmware.api [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385169, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.416460] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385166, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.67229} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.416784] env[62585]: INFO nova.virt.vmwareapi.ds_util [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] c31a584a-3dfb-4ec2-8852-e9e27cafcb2d/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk. 
[ 1022.417575] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f27d6ea-7adb-41cf-8e6d-829a8c40e2cb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.443617] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] c31a584a-3dfb-4ec2-8852-e9e27cafcb2d/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1022.443925] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-383dceb5-5320-4840-a135-35678403004d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.463302] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1022.463302] env[62585]: value = "task-1385170" [ 1022.463302] env[62585]: _type = "Task" [ 1022.463302] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.471547] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385170, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.501917] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-1995b4a3-f513-408e-a09c-78c1829aeab6 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1022.502193] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68eddcdb-fc35-4aed-9463-a464f1d27ea4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.508944] env[62585]: DEBUG oslo_vmware.api [None req-1995b4a3-f513-408e-a09c-78c1829aeab6 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 1022.508944] env[62585]: value = "task-1385171" [ 1022.508944] env[62585]: _type = "Task" [ 1022.508944] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.519640] env[62585]: DEBUG oslo_vmware.api [None req-1995b4a3-f513-408e-a09c-78c1829aeab6 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385171, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.590058] env[62585]: DEBUG oslo_vmware.api [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Task: {'id': task-1385169, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.429315} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.590207] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1022.590388] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1022.590577] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1022.590763] env[62585]: INFO nova.compute.manager [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1022.591016] env[62585]: DEBUG oslo.service.loopingcall [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1022.591218] env[62585]: DEBUG nova.compute.manager [-] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1022.591318] env[62585]: DEBUG nova.network.neutron [-] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1022.797700] env[62585]: DEBUG oslo_concurrency.lockutils [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "b7b8338a-2e9f-4854-8f4d-ede21b150317" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.797967] env[62585]: DEBUG oslo_concurrency.lockutils [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "b7b8338a-2e9f-4854-8f4d-ede21b150317" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.798222] env[62585]: DEBUG oslo_concurrency.lockutils [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "b7b8338a-2e9f-4854-8f4d-ede21b150317-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.798416] env[62585]: DEBUG oslo_concurrency.lockutils [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "b7b8338a-2e9f-4854-8f4d-ede21b150317-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.798589] env[62585]: DEBUG oslo_concurrency.lockutils [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "b7b8338a-2e9f-4854-8f4d-ede21b150317-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.801597] env[62585]: INFO nova.compute.manager [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Terminating instance [ 1022.804193] env[62585]: DEBUG nova.compute.manager [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1022.804507] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1022.805723] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0efdf1a9-29f0-4024-b845-5c2969a1ee35 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.816068] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1022.816362] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a227e470-c4f6-4e34-9279-becc68dcf252 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.824613] env[62585]: DEBUG oslo_vmware.api [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1022.824613] env[62585]: value = "task-1385172" [ 1022.824613] env[62585]: _type = "Task" [ 1022.824613] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.836279] env[62585]: DEBUG oslo_vmware.api [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385172, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.874652] env[62585]: DEBUG nova.compute.manager [req-f9ce55e6-6614-4929-a731-777fbcd95dc8 req-a8fada6d-e27c-414f-98e8-4b4981552523 service nova] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Received event network-vif-deleted-2eb59df2-5648-46be-995c-88785a05be2a {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1022.874847] env[62585]: INFO nova.compute.manager [req-f9ce55e6-6614-4929-a731-777fbcd95dc8 req-a8fada6d-e27c-414f-98e8-4b4981552523 service nova] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Neutron deleted interface 2eb59df2-5648-46be-995c-88785a05be2a; detaching it from the instance and deleting it from the info cache [ 1022.875029] env[62585]: DEBUG nova.network.neutron [req-f9ce55e6-6614-4929-a731-777fbcd95dc8 req-a8fada6d-e27c-414f-98e8-4b4981552523 service nova] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.973971] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385170, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.020164] env[62585]: DEBUG oslo_vmware.api [None req-1995b4a3-f513-408e-a09c-78c1829aeab6 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385171, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.336973] env[62585]: DEBUG oslo_vmware.api [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385172, 'name': PowerOffVM_Task, 'duration_secs': 0.23607} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.337495] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1023.337742] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1023.338085] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b9f5458-0f0b-460c-8134-72646aacee17 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.349670] env[62585]: DEBUG nova.network.neutron [-] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.377404] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4af9b881-3831-4950-b95c-64350d6867da {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.388416] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6415f0b9-8916-4b7c-a0e9-5ebf1627f00f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.405334] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1023.405563] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1023.405817] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Deleting 
the datastore file [datastore1] b7b8338a-2e9f-4854-8f4d-ede21b150317 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1023.406127] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-389ce665-90e8-44bf-a4d8-124bdfa84b2f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.415561] env[62585]: DEBUG oslo_vmware.api [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for the task: (returnval){ [ 1023.415561] env[62585]: value = "task-1385174" [ 1023.415561] env[62585]: _type = "Task" [ 1023.415561] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.424067] env[62585]: DEBUG nova.compute.manager [req-f9ce55e6-6614-4929-a731-777fbcd95dc8 req-a8fada6d-e27c-414f-98e8-4b4981552523 service nova] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Detach interface failed, port_id=2eb59df2-5648-46be-995c-88785a05be2a, reason: Instance 0d256aa0-a873-4ff1-8c43-464d8b2d03a8 could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1023.433548] env[62585]: DEBUG oslo_vmware.api [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385174, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.476025] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385170, 'name': ReconfigVM_Task, 'duration_secs': 0.514994} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.476439] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Reconfigured VM instance instance-00000068 to attach disk [datastore1] c31a584a-3dfb-4ec2-8852-e9e27cafcb2d/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1023.477265] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501c593e-e01c-4b29-a1ad-350d1251e299 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.504185] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ceb8713-6c9b-4e08-ae22-30b762ef132d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.523357] env[62585]: DEBUG oslo_vmware.api [None req-1995b4a3-f513-408e-a09c-78c1829aeab6 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385171, 'name': PowerOffVM_Task, 'duration_secs': 0.943294} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.524696] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-1995b4a3-f513-408e-a09c-78c1829aeab6 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1023.524907] env[62585]: DEBUG nova.compute.manager [None req-1995b4a3-f513-408e-a09c-78c1829aeab6 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1023.525274] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1023.525274] env[62585]: value = "task-1385175" [ 1023.525274] env[62585]: _type = "Task" [ 1023.525274] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.525983] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a365138-04e3-4f5b-b404-74da15f94e1a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.536694] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385175, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.852902] env[62585]: INFO nova.compute.manager [-] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Took 1.26 seconds to deallocate network for instance. [ 1023.935684] env[62585]: DEBUG oslo_vmware.api [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Task: {'id': task-1385174, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.222465} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.935684] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1023.935862] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1023.936053] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1023.936241] env[62585]: INFO nova.compute.manager [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1023.936488] env[62585]: DEBUG oslo.service.loopingcall [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1023.936710] env[62585]: DEBUG nova.compute.manager [-] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1023.936789] env[62585]: DEBUG nova.network.neutron [-] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1024.038633] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385175, 'name': ReconfigVM_Task, 'duration_secs': 0.158567} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.038900] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1024.039160] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d7c65d3-fb1d-4606-a136-d016f75221a7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.044414] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1995b4a3-f513-408e-a09c-78c1829aeab6 tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "167b0fe3-d6e0-4249-90ab-7b1181669828" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.059s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.046632] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1024.046632] env[62585]: value = "task-1385176" [ 1024.046632] env[62585]: _type = "Task" [ 1024.046632] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.054899] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385176, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.361274] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.361575] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.361820] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.387885] env[62585]: INFO nova.scheduler.client.report [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Deleted allocations for instance 0d256aa0-a873-4ff1-8c43-464d8b2d03a8 [ 1024.559902] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385176, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.646280] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "167b0fe3-d6e0-4249-90ab-7b1181669828" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.646576] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "167b0fe3-d6e0-4249-90ab-7b1181669828" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.646792] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "167b0fe3-d6e0-4249-90ab-7b1181669828-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.646985] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "167b0fe3-d6e0-4249-90ab-7b1181669828-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.647182] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "167b0fe3-d6e0-4249-90ab-7b1181669828-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.649523] env[62585]: INFO nova.compute.manager [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Terminating instance [ 1024.651317] env[62585]: DEBUG nova.compute.manager [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1024.651887] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1024.652513] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795d5cbe-230b-4558-9ee4-bb4da4a37ac3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.660432] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1024.660682] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f3cee67-abf3-466a-ad63-b50c4861002a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.694273] env[62585]: DEBUG nova.network.neutron [-] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.898225] env[62585]: DEBUG nova.compute.manager [req-68b97a94-b39a-428a-96a6-6d07bbad41c2 req-81946e82-b395-44ef-a71e-617291d7afc5 service nova] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Received event network-vif-deleted-d006570b-3bb8-443a-8eb9-f4d5dcc7c366 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1024.898761] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a9656f8b-f7fc-4dd2-85c4-83392b15a19c tempest-DeleteServersTestJSON-1905740495 tempest-DeleteServersTestJSON-1905740495-project-member] Lock "0d256aa0-a873-4ff1-8c43-464d8b2d03a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.490s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.058234] env[62585]: DEBUG oslo_vmware.api [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385176, 'name': PowerOnVM_Task, 'duration_secs': 0.645061} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.058617] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1025.061128] env[62585]: DEBUG nova.compute.manager [None req-e11a4d71-a81d-421e-867c-c2c89eae67b9 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1025.061899] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f73d2b-9c17-4f8d-922a-ced7121c856e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.196899] env[62585]: INFO nova.compute.manager [-] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Took 1.26 seconds to deallocate network for instance. [ 1025.705136] env[62585]: DEBUG oslo_concurrency.lockutils [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.705136] env[62585]: DEBUG oslo_concurrency.lockutils [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.705136] env[62585]: DEBUG nova.objects.instance [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lazy-loading 'resources' on Instance uuid b7b8338a-2e9f-4854-8f4d-ede21b150317 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1026.288234] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dffb02d9-cac5-41d9-97d3-1edcaa93d5c2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.296685] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5aabebe-3dad-416d-8a85-e7fac02fee40 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.336527] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288056b5-8a6a-4aef-85d3-a47405d678c4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.346417] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f3781f-114c-4ffc-87e9-dae9d3db0e00 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.361478] env[62585]: 
DEBUG nova.compute.provider_tree [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.544602] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1026.545533] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1026.545533] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Starting heal instance info cache {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1026.867721] env[62585]: DEBUG nova.scheduler.client.report [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1027.372582] env[62585]: DEBUG oslo_concurrency.lockutils [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.669s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.394919] env[62585]: INFO nova.scheduler.client.report [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Deleted allocations for instance b7b8338a-2e9f-4854-8f4d-ede21b150317 [ 1027.822777] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1027.823014] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1027.823218] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleting 
the datastore file [datastore1] 167b0fe3-d6e0-4249-90ab-7b1181669828 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1027.824053] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abcfdb6e-037d-4c13-8a66-de935b598699 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.830454] env[62585]: DEBUG oslo_vmware.api [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for the task: (returnval){ [ 1027.830454] env[62585]: value = "task-1385178" [ 1027.830454] env[62585]: _type = "Task" [ 1027.830454] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.839159] env[62585]: DEBUG oslo_vmware.api [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385178, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.903787] env[62585]: DEBUG oslo_concurrency.lockutils [None req-16a31799-6b5f-42c8-886c-f079b95695a7 tempest-ServerRescueTestJSON-445206306 tempest-ServerRescueTestJSON-445206306-project-member] Lock "b7b8338a-2e9f-4854-8f4d-ede21b150317" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.106s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.971545] env[62585]: INFO nova.compute.manager [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Rescuing [ 1027.972043] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "refresh_cache-d8955c26-85d1-481c-b1d2-4879bb52158b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.972335] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "refresh_cache-d8955c26-85d1-481c-b1d2-4879bb52158b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.972615] env[62585]: DEBUG nova.network.neutron [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1028.341558] env[62585]: DEBUG oslo_vmware.api [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Task: {'id': task-1385178, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163542} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.341558] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1028.341558] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1028.342356] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1028.342356] env[62585]: INFO nova.compute.manager [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Took 3.69 seconds to destroy the instance on the hypervisor. [ 1028.342356] env[62585]: DEBUG oslo.service.loopingcall [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1028.342356] env[62585]: DEBUG nova.compute.manager [-] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1028.342475] env[62585]: DEBUG nova.network.neutron [-] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1028.709245] env[62585]: DEBUG nova.network.neutron [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Updating instance_info_cache with network_info: [{"id": "be0d6bab-1253-458c-b3cd-71ed0eb87c2c", "address": "fa:16:3e:37:e6:f2", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe0d6bab-12", 
"ovs_interfaceid": "be0d6bab-1253-458c-b3cd-71ed0eb87c2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.779064] env[62585]: DEBUG nova.compute.manager [req-d86b0d6c-bf42-4310-98c5-7afcfbb02d0b req-09d8abe0-9df7-46b0-86af-d407e4d2c822 service nova] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Received event network-vif-deleted-65b547d0-01e3-4d24-82d0-876644700248 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1028.779293] env[62585]: INFO nova.compute.manager [req-d86b0d6c-bf42-4310-98c5-7afcfbb02d0b req-09d8abe0-9df7-46b0-86af-d407e4d2c822 service nova] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Neutron deleted interface 65b547d0-01e3-4d24-82d0-876644700248; detaching it from the instance and deleting it from the info cache [ 1028.779472] env[62585]: DEBUG nova.network.neutron [req-d86b0d6c-bf42-4310-98c5-7afcfbb02d0b req-09d8abe0-9df7-46b0-86af-d407e4d2c822 service nova] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.212454] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "refresh_cache-d8955c26-85d1-481c-b1d2-4879bb52158b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.247233] env[62585]: DEBUG nova.network.neutron [-] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.282410] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-471f268e-7342-4851-b731-71fc91c47db5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.295301] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6869c0a-5fb0-467b-b7f3-3bd6acee190a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.324484] env[62585]: DEBUG nova.compute.manager [req-d86b0d6c-bf42-4310-98c5-7afcfbb02d0b req-09d8abe0-9df7-46b0-86af-d407e4d2c822 service nova] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Detach interface failed, port_id=65b547d0-01e3-4d24-82d0-876644700248, reason: Instance 167b0fe3-d6e0-4249-90ab-7b1181669828 could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1029.566063] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Didn't find any instances for network info cache update. 
{{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1029.566247] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1029.566412] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1029.566560] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1029.566706] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1029.566846] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1029.566986] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1029.567132] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62585) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1029.567278] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1029.743116] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1029.743462] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6dc33984-df72-47d3-ba29-b2a87b1312fa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.749758] env[62585]: INFO nova.compute.manager [-] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Took 1.41 seconds to deallocate network for instance. 
[ 1029.755304] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1029.755304] env[62585]: value = "task-1385179" [ 1029.755304] env[62585]: _type = "Task" [ 1029.755304] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.765863] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385179, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.070766] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.070766] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.071622] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.071622] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62585) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1030.072078] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3a7783-2be9-4263-836d-dc89d09ca039 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.081894] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e511c1-7358-49fc-9dc0-3d857a171301 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.095851] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df64391-b622-480d-893b-eb22670f6ee8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.102453] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbb5145-d143-41ca-8f32-4082909f16f2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.130944] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180869MB free_disk=176GB 
free_vcpus=48 pci_devices=None {{(pid=62585) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1030.131121] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.131309] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.260950] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.267307] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385179, 'name': PowerOffVM_Task, 'duration_secs': 0.22902} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.267609] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1030.268392] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cdbb047-7033-4a1f-926c-28c8e1fd4c6c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.287216] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047174e6-011a-4729-9342-1bd4612a819a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.319292] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1030.319662] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1077a00-00c1-4b30-9299-91abf82efeee {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.328146] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1030.328146] env[62585]: value = "task-1385180" [ 1030.328146] env[62585]: _type = "Task" [ 1030.328146] env[62585]: } to 
complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.338181] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] VM already powered off {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1030.338445] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1030.338747] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.338962] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.339220] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1030.339498] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9617e5c2-8925-422f-8bb8-8c5e254d4e1a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.348689] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1030.348850] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1030.349784] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e58dcff-d040-4b23-9e2f-c48bbcb9d9b4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.357720] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1030.357720] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b41537-f92d-9b84-54db-b0598a4987d7" [ 1030.357720] env[62585]: _type = "Task" [ 1030.357720] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.365315] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b41537-f92d-9b84-54db-b0598a4987d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.868901] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52b41537-f92d-9b84-54db-b0598a4987d7, 'name': SearchDatastore_Task, 'duration_secs': 0.015425} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.869684] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0397ae79-711a-4113-9f44-bb2e1c6adff6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.875630] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1030.875630] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f87661-facc-969d-430f-d10792b9c7e2" [ 1030.875630] env[62585]: _type = "Task" [ 1030.875630] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.884341] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f87661-facc-969d-430f-d10792b9c7e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.157414] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1031.157569] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance d8955c26-85d1-481c-b1d2-4879bb52158b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1031.157689] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance c31a584a-3dfb-4ec2-8852-e9e27cafcb2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1031.157830] env[62585]: WARNING nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 167b0fe3-d6e0-4249-90ab-7b1181669828 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1031.158014] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1031.158160] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1031.212043] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07681d05-d096-4ac7-9399-c44ac322d084 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.219766] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70de5c6b-f24a-4289-af16-1ad6917c5d3c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.248594] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be479d94-97f7-476a-8c90-b32dff7541f4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.255813] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff5c761-c6d7-4a15-b788-0e25b1eb0b96 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.268764] env[62585]: DEBUG nova.compute.provider_tree [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.387978] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 
tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f87661-facc-969d-430f-d10792b9c7e2, 'name': SearchDatastore_Task, 'duration_secs': 0.010077} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.388243] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.388499] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] d8955c26-85d1-481c-b1d2-4879bb52158b/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk. {{(pid=62585) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1031.388755] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6dd0ee3-84f7-4cdc-923b-4225b1709e83 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.396729] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1031.396729] env[62585]: value = "task-1385181" [ 1031.396729] env[62585]: _type = "Task" [ 1031.396729] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.404353] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385181, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.772240] env[62585]: DEBUG nova.scheduler.client.report [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1031.906513] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385181, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459241} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.906831] env[62585]: INFO nova.virt.vmwareapi.ds_util [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] d8955c26-85d1-481c-b1d2-4879bb52158b/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk. [ 1031.907563] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b7687a-d126-426f-b191-5e02f69b9d73 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.932199] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] d8955c26-85d1-481c-b1d2-4879bb52158b/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1031.932487] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4a32982-0306-46a0-b155-d69e86157a24 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.957431] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1031.957431] env[62585]: value = "task-1385182" [ 1031.957431] env[62585]: _type = "Task" [ 1031.957431] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.966295] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385182, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.277349] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62585) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1032.277573] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.146s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.277866] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.017s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.278101] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.301467] env[62585]: INFO nova.scheduler.client.report [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Deleted allocations for instance 167b0fe3-d6e0-4249-90ab-7b1181669828 [ 1032.467845] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385182, 'name': ReconfigVM_Task, 'duration_secs': 0.488288} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.468163] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Reconfigured VM instance instance-00000067 to attach disk [datastore1] d8955c26-85d1-481c-b1d2-4879bb52158b/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1032.468942] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437f41db-3925-42ed-84aa-793aa62a82f9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.493598] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64d4d4a4-e66c-4027-bd11-745bf41f57b7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.508863] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1032.508863] env[62585]: value = "task-1385183" [ 1032.508863] env[62585]: _type = "Task" [ 1032.508863] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.516737] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385183, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.809277] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a7cb97d8-7c0e-46c1-9731-2a987af7bd3b tempest-ServersTestJSON-1776640796 tempest-ServersTestJSON-1776640796-project-member] Lock "167b0fe3-d6e0-4249-90ab-7b1181669828" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.162s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.019233] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385183, 'name': ReconfigVM_Task, 'duration_secs': 0.495742} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.019537] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1033.019779] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd962e00-5ad9-4842-b18a-020c55b3cbe3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.026370] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1033.026370] env[62585]: value = "task-1385184" [ 1033.026370] env[62585]: _type = "Task" [ 1033.026370] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.035487] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385184, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.536348] env[62585]: DEBUG oslo_vmware.api [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385184, 'name': PowerOnVM_Task, 'duration_secs': 0.414828} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.536621] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1033.539550] env[62585]: DEBUG nova.compute.manager [None req-c94e804b-63b9-4e38-873c-799ca81ff9b2 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1033.540331] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7bd674-a8a5-4da2-ba4b-ad940a02d7f3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.936437] env[62585]: INFO nova.compute.manager [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Unrescuing [ 1034.936767] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "refresh_cache-d8955c26-85d1-481c-b1d2-4879bb52158b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.936891] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "refresh_cache-d8955c26-85d1-481c-b1d2-4879bb52158b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.937087] env[62585]: DEBUG nova.network.neutron [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1035.647413] env[62585]: DEBUG nova.network.neutron [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Updating instance_info_cache with network_info: [{"id": "be0d6bab-1253-458c-b3cd-71ed0eb87c2c", "address": "fa:16:3e:37:e6:f2", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe0d6bab-12", "ovs_interfaceid": "be0d6bab-1253-458c-b3cd-71ed0eb87c2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.149931] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "refresh_cache-d8955c26-85d1-481c-b1d2-4879bb52158b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.150661] env[62585]: DEBUG nova.objects.instance [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lazy-loading 'flavor' on Instance uuid d8955c26-85d1-481c-b1d2-4879bb52158b {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1036.657069] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1980d982-28f6-4e10-a397-8611ce6f1e21 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.678373] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.678650] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3bc58d95-f10a-458d-a9b9-f1586f323255 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.686357] env[62585]: DEBUG oslo_vmware.api [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1036.686357] env[62585]: value = "task-1385185" [ 1036.686357] env[62585]: _type = "Task" [ 1036.686357] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.694454] env[62585]: DEBUG oslo_vmware.api [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385185, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.197028] env[62585]: DEBUG oslo_vmware.api [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385185, 'name': PowerOffVM_Task, 'duration_secs': 0.218491} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.197028] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1037.202012] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1037.202391] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54d2c5d8-798c-4c23-b054-f832d6f2c00a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.223605] env[62585]: DEBUG oslo_vmware.api [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1037.223605] env[62585]: value = "task-1385186" [ 1037.223605] env[62585]: _type = "Task" [ 1037.223605] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.231902] env[62585]: DEBUG oslo_vmware.api [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385186, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.737796] env[62585]: DEBUG oslo_vmware.api [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385186, 'name': ReconfigVM_Task, 'duration_secs': 0.252821} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.738093] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1037.738291] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1037.738535] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5de9e7d-b210-485e-8e0e-e40426007788 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.745713] env[62585]: DEBUG oslo_vmware.api [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1037.745713] env[62585]: value = "task-1385187" [ 1037.745713] env[62585]: _type = "Task" [ 1037.745713] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.753101] env[62585]: DEBUG oslo_vmware.api [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385187, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.255548] env[62585]: DEBUG oslo_vmware.api [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385187, 'name': PowerOnVM_Task, 'duration_secs': 0.342588} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.255830] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1038.256079] env[62585]: DEBUG nova.compute.manager [None req-a7bb9725-51b8-4f68-be36-7bc47129b0c5 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1038.256834] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988a21aa-aed8-463d-8d60-cd759c62d88c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.314087] env[62585]: DEBUG oslo_concurrency.lockutils [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.314356] env[62585]: DEBUG oslo_concurrency.lockutils [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.817116] env[62585]: DEBUG nova.compute.utils [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1039.319609] env[62585]: DEBUG oslo_concurrency.lockutils [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.380185] env[62585]: DEBUG oslo_concurrency.lockutils [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.380477] env[62585]: DEBUG oslo_concurrency.lockutils [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62585) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.380730] env[62585]: INFO nova.compute.manager [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Attaching volume 38c07403-58dd-40d1-bbb3-a38edf442862 to /dev/sdb [ 1040.411930] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcfd413f-08b0-4e23-8fe4-97f3a530b109 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.419519] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599d0794-f8cb-4731-9eca-b6aad9c35d53 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.434437] env[62585]: DEBUG nova.virt.block_device [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Updating existing volume attachment record: 0d6ad04b-49c1-42b2-a4b4-56a2cb0dbe52 {{(pid=62585) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1041.811575] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.811911] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.314188] env[62585]: DEBUG nova.compute.manager [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1042.836302] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.836593] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.838157] env[62585]: INFO nova.compute.claims [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1043.901678] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3f11a5-444d-4441-91d0-3f4b64d51387 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.909770] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9bf55b9-58a5-4384-b5e5-158b21a88f56 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.939215] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa5f221-c9b6-4f8e-8f45-f62364f95dfa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.946446] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81dfb154-24d2-4314-8b7b-e0b090e47aaa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.960306] env[62585]: DEBUG nova.compute.provider_tree [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.463234] env[62585]: DEBUG nova.scheduler.client.report [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1044.969546] 
env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.132s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.970013] env[62585]: DEBUG nova.compute.manager [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1044.977805] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Volume attach. Driver type: vmdk {{(pid=62585) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1044.978155] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294083', 'volume_id': '38c07403-58dd-40d1-bbb3-a38edf442862', 'name': 'volume-38c07403-58dd-40d1-bbb3-a38edf442862', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bb076a4e-eb38-4d0c-bdea-f8ebb46d7968', 'attached_at': '', 'detached_at': '', 'volume_id': '38c07403-58dd-40d1-bbb3-a38edf442862', 'serial': '38c07403-58dd-40d1-bbb3-a38edf442862'} {{(pid=62585) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1044.979365] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d77cfba-7e65-41d7-8ebc-d7ef97a16258 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.003708] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ceba67-fc6b-4f3b-81fd-85347d120f6e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.028982] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] volume-38c07403-58dd-40d1-bbb3-a38edf442862/volume-38c07403-58dd-40d1-bbb3-a38edf442862.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1045.029510] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52ed8ad3-7158-4bba-ab2b-ad6b4ad02d9c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.047793] env[62585]: DEBUG oslo_vmware.api [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 
tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1045.047793] env[62585]: value = "task-1385192" [ 1045.047793] env[62585]: _type = "Task" [ 1045.047793] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.056775] env[62585]: DEBUG oslo_vmware.api [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385192, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.475764] env[62585]: DEBUG nova.compute.utils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1045.477215] env[62585]: DEBUG nova.compute.manager [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1045.477465] env[62585]: DEBUG nova.network.neutron [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1045.524742] env[62585]: DEBUG nova.policy [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1808605fbf174f1b847e3f066ba78d87', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c48f11ec1fa84b4a96ef72198fcec3ef', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 1045.557974] env[62585]: DEBUG oslo_vmware.api [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385192, 'name': ReconfigVM_Task, 'duration_secs': 0.329077} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.558343] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Reconfigured VM instance instance-00000064 to attach disk [datastore2] volume-38c07403-58dd-40d1-bbb3-a38edf442862/volume-38c07403-58dd-40d1-bbb3-a38edf442862.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1045.563726] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d384bc4-1571-4002-bb53-35b8ccfa5cf7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.579351] env[62585]: DEBUG oslo_vmware.api [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1045.579351] env[62585]: value = "task-1385193" [ 1045.579351] env[62585]: _type = "Task" [ 1045.579351] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.587528] env[62585]: DEBUG oslo_vmware.api [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385193, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.788882] env[62585]: DEBUG nova.network.neutron [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Successfully created port: 2f56c908-7d1b-4833-88be-c915dc3e5385 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1045.980149] env[62585]: DEBUG nova.compute.manager [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Start building block device mappings for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1046.091020] env[62585]: DEBUG oslo_vmware.api [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385193, 'name': ReconfigVM_Task, 'duration_secs': 0.169311} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.091336] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294083', 'volume_id': '38c07403-58dd-40d1-bbb3-a38edf442862', 'name': 'volume-38c07403-58dd-40d1-bbb3-a38edf442862', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bb076a4e-eb38-4d0c-bdea-f8ebb46d7968', 'attached_at': '', 'detached_at': '', 'volume_id': '38c07403-58dd-40d1-bbb3-a38edf442862', 'serial': '38c07403-58dd-40d1-bbb3-a38edf442862'} {{(pid=62585) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1046.993020] env[62585]: DEBUG nova.compute.manager [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1047.017381] env[62585]: DEBUG nova.virt.hardware [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1047.017633] env[62585]: DEBUG nova.virt.hardware [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1047.017791] env[62585]: DEBUG nova.virt.hardware [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1047.017975] env[62585]: DEBUG nova.virt.hardware [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1047.018136] env[62585]: DEBUG nova.virt.hardware [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 
tempest-ServerRescueNegativeTestJSON-377455709-project-member] Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1047.018284] env[62585]: DEBUG nova.virt.hardware [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1047.018485] env[62585]: DEBUG nova.virt.hardware [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1047.018643] env[62585]: DEBUG nova.virt.hardware [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1047.018809] env[62585]: DEBUG nova.virt.hardware [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1047.018970] env[62585]: DEBUG nova.virt.hardware [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1047.019163] env[62585]: DEBUG nova.virt.hardware [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1047.020035] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c2f46d-acf4-4e9e-8e7a-d5e58d35e372 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.028492] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eece922-1aa1-4f6a-890c-6a890ab97c40 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.128574] env[62585]: DEBUG nova.objects.instance [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lazy-loading 'flavor' on Instance uuid bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.165976] env[62585]: DEBUG nova.compute.manager [req-4d9b8174-5c8e-4344-be95-4a17faeb2b8c req-0bfc0199-bf78-4a90-8f97-460934f6abbe service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Received event network-vif-plugged-2f56c908-7d1b-4833-88be-c915dc3e5385 {{(pid=62585) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1047.166230] env[62585]: DEBUG oslo_concurrency.lockutils [req-4d9b8174-5c8e-4344-be95-4a17faeb2b8c req-0bfc0199-bf78-4a90-8f97-460934f6abbe service nova] Acquiring lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.166443] env[62585]: DEBUG oslo_concurrency.lockutils [req-4d9b8174-5c8e-4344-be95-4a17faeb2b8c req-0bfc0199-bf78-4a90-8f97-460934f6abbe service nova] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.166616] env[62585]: DEBUG oslo_concurrency.lockutils [req-4d9b8174-5c8e-4344-be95-4a17faeb2b8c req-0bfc0199-bf78-4a90-8f97-460934f6abbe service nova] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.166784] env[62585]: DEBUG nova.compute.manager [req-4d9b8174-5c8e-4344-be95-4a17faeb2b8c req-0bfc0199-bf78-4a90-8f97-460934f6abbe service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] No waiting events found dispatching network-vif-plugged-2f56c908-7d1b-4833-88be-c915dc3e5385 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1047.166951] env[62585]: WARNING nova.compute.manager [req-4d9b8174-5c8e-4344-be95-4a17faeb2b8c req-0bfc0199-bf78-4a90-8f97-460934f6abbe service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Received unexpected event network-vif-plugged-2f56c908-7d1b-4833-88be-c915dc3e5385 for instance with vm_state building and task_state spawning. 
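Note on the "network-vif-plugged" lines above: while the instance is being built, nova-compute expects Neutron to send an external event once the port is wired up; the compute manager keeps a registry of waiters keyed by instance and event name, pops the matching waiter when the event arrives, and logs the event as unexpected when nothing has registered for it yet (which is what happens here, since the VM is still in vm_state building / task_state spawning). The following is only a minimal, hypothetical sketch of that pop-or-warn dispatch pattern, not nova's actual implementation; the class and method names are illustrative.

```python
# Hypothetical sketch of the "pop a waiter or warn about an unexpected
# event" pattern visible in the log above. Not nova's real code.
import logging
import threading

LOG = logging.getLogger(__name__)


class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        # (instance_uuid, event_name) -> threading.Event
        self._waiters = {}

    def prepare_for_event(self, instance_uuid, event_name):
        """Register interest in an event before starting the external work."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        """Called by the external-event handler when the event is reported."""
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            # The case logged above: the event arrived while the instance was
            # still building and no one was waiting for it yet.
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
            return False
        waiter.set()
        return True


# Usage sketch: register first, trigger the port binding, then wait.
events = InstanceEvents()
waiter = events.prepare_for_event(
    "fa4c721c-3455-48c9-bbca-4b4dac29aff6",
    "network-vif-plugged-2f56c908-7d1b-4833-88be-c915dc3e5385")
# ... ask Neutron to bind the port, continue building the VM ...
events.pop_instance_event(
    "fa4c721c-3455-48c9-bbca-4b4dac29aff6",
    "network-vif-plugged-2f56c908-7d1b-4833-88be-c915dc3e5385")
waiter.wait(timeout=300)
```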
[ 1047.249208] env[62585]: DEBUG nova.network.neutron [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Successfully updated port: 2f56c908-7d1b-4833-88be-c915dc3e5385 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1047.633581] env[62585]: DEBUG oslo_concurrency.lockutils [None req-05e48993-e055-4e50-b290-8e4531871611 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.253s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.713602] env[62585]: DEBUG oslo_concurrency.lockutils [None req-87bf01d0-50fd-407c-8510-009ee2b5030b tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.713602] env[62585]: DEBUG oslo_concurrency.lockutils [None req-87bf01d0-50fd-407c-8510-009ee2b5030b tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.713602] env[62585]: DEBUG nova.compute.manager [None req-87bf01d0-50fd-407c-8510-009ee2b5030b tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1047.714532] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed0e235-41ca-47ad-b350-3d8f27999c94 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.722069] env[62585]: DEBUG nova.compute.manager [None req-87bf01d0-50fd-407c-8510-009ee2b5030b tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62585) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1047.722637] env[62585]: DEBUG nova.objects.instance [None req-87bf01d0-50fd-407c-8510-009ee2b5030b tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lazy-loading 'flavor' on Instance uuid bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.751546] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 
1047.751714] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.751853] env[62585]: DEBUG nova.network.neutron [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1048.228312] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-87bf01d0-50fd-407c-8510-009ee2b5030b tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.228660] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9fb41bc-35ee-4217-94ed-6e7d231d07c5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.236773] env[62585]: DEBUG oslo_vmware.api [None req-87bf01d0-50fd-407c-8510-009ee2b5030b tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1048.236773] env[62585]: value = "task-1385194" [ 1048.236773] env[62585]: _type = "Task" [ 1048.236773] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.245651] env[62585]: DEBUG oslo_vmware.api [None req-87bf01d0-50fd-407c-8510-009ee2b5030b tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385194, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.283826] env[62585]: DEBUG nova.network.neutron [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1048.458319] env[62585]: DEBUG nova.network.neutron [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Updating instance_info_cache with network_info: [{"id": "2f56c908-7d1b-4833-88be-c915dc3e5385", "address": "fa:16:3e:97:bd:86", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f56c908-7d", "ovs_interfaceid": "2f56c908-7d1b-4833-88be-c915dc3e5385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.747253] env[62585]: DEBUG oslo_vmware.api [None req-87bf01d0-50fd-407c-8510-009ee2b5030b tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385194, 'name': PowerOffVM_Task, 'duration_secs': 0.181934} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.747539] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-87bf01d0-50fd-407c-8510-009ee2b5030b tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1048.747720] env[62585]: DEBUG nova.compute.manager [None req-87bf01d0-50fd-407c-8510-009ee2b5030b tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1048.748467] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27de06a5-790e-4369-81c3-39de6f250f79 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.961503] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.961864] env[62585]: DEBUG nova.compute.manager [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Instance network_info: |[{"id": "2f56c908-7d1b-4833-88be-c915dc3e5385", "address": "fa:16:3e:97:bd:86", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f56c908-7d", "ovs_interfaceid": "2f56c908-7d1b-4833-88be-c915dc3e5385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1048.962331] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:bd:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35342bcb-8b06-472e-b3c0-43fd3d6c4b30', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'2f56c908-7d1b-4833-88be-c915dc3e5385', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1048.969967] env[62585]: DEBUG oslo.service.loopingcall [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1048.970250] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1048.970485] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0bec86f-1649-4aa5-82e1-e5c84f6efc1f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.990488] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1048.990488] env[62585]: value = "task-1385195" [ 1048.990488] env[62585]: _type = "Task" [ 1048.990488] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.998150] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385195, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.190983] env[62585]: DEBUG nova.compute.manager [req-127e06c3-b1a0-4d75-afff-24a448489ed3 req-60a1e6cb-f6e6-49e9-a654-70d92ca314f4 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Received event network-changed-2f56c908-7d1b-4833-88be-c915dc3e5385 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1049.191203] env[62585]: DEBUG nova.compute.manager [req-127e06c3-b1a0-4d75-afff-24a448489ed3 req-60a1e6cb-f6e6-49e9-a654-70d92ca314f4 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Refreshing instance network info cache due to event network-changed-2f56c908-7d1b-4833-88be-c915dc3e5385. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1049.191430] env[62585]: DEBUG oslo_concurrency.lockutils [req-127e06c3-b1a0-4d75-afff-24a448489ed3 req-60a1e6cb-f6e6-49e9-a654-70d92ca314f4 service nova] Acquiring lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.191580] env[62585]: DEBUG oslo_concurrency.lockutils [req-127e06c3-b1a0-4d75-afff-24a448489ed3 req-60a1e6cb-f6e6-49e9-a654-70d92ca314f4 service nova] Acquired lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.191752] env[62585]: DEBUG nova.network.neutron [req-127e06c3-b1a0-4d75-afff-24a448489ed3 req-60a1e6cb-f6e6-49e9-a654-70d92ca314f4 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Refreshing network info cache for port 2f56c908-7d1b-4833-88be-c915dc3e5385 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1049.260446] env[62585]: DEBUG oslo_concurrency.lockutils [None req-87bf01d0-50fd-407c-8510-009ee2b5030b tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.547s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.501544] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385195, 'name': CreateVM_Task, 'duration_secs': 0.322933} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.501710] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1049.502446] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.502621] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.502975] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1049.503246] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c765267-6ec4-4054-bd70-36ff19ebc760 {{(pid=62585) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.507976] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1049.507976] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52da86b9-322f-e7b5-df4f-dea82e2b9cab" [ 1049.507976] env[62585]: _type = "Task" [ 1049.507976] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.516061] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52da86b9-322f-e7b5-df4f-dea82e2b9cab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.879379] env[62585]: DEBUG nova.network.neutron [req-127e06c3-b1a0-4d75-afff-24a448489ed3 req-60a1e6cb-f6e6-49e9-a654-70d92ca314f4 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Updated VIF entry in instance network info cache for port 2f56c908-7d1b-4833-88be-c915dc3e5385. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1049.879771] env[62585]: DEBUG nova.network.neutron [req-127e06c3-b1a0-4d75-afff-24a448489ed3 req-60a1e6cb-f6e6-49e9-a654-70d92ca314f4 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Updating instance_info_cache with network_info: [{"id": "2f56c908-7d1b-4833-88be-c915dc3e5385", "address": "fa:16:3e:97:bd:86", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f56c908-7d", "ovs_interfaceid": "2f56c908-7d1b-4833-88be-c915dc3e5385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.018967] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52da86b9-322f-e7b5-df4f-dea82e2b9cab, 'name': SearchDatastore_Task, 'duration_secs': 0.009938} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.019604] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.019843] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1050.020096] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.020251] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.020435] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1050.020701] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03b6c73c-3d31-4fc1-a4cf-fef398294fa8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.029642] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1050.029828] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1050.030549] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13078b5a-3b1c-407d-bbe3-f10d3417de79 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.035640] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1050.035640] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52bb2764-6d75-d2fd-8277-1f0afd55c04d" [ 1050.035640] env[62585]: _type = "Task" [ 1050.035640] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.043384] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52bb2764-6d75-d2fd-8277-1f0afd55c04d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.136552] env[62585]: DEBUG nova.objects.instance [None req-65dafa26-fea2-4bb2-93ec-4b07a2bc1252 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lazy-loading 'flavor' on Instance uuid bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.382728] env[62585]: DEBUG oslo_concurrency.lockutils [req-127e06c3-b1a0-4d75-afff-24a448489ed3 req-60a1e6cb-f6e6-49e9-a654-70d92ca314f4 service nova] Releasing lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.545992] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52bb2764-6d75-d2fd-8277-1f0afd55c04d, 'name': SearchDatastore_Task, 'duration_secs': 0.008494} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.546752] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a7c8fa2-e0ad-4a07-958d-5584f7e9a26a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.551891] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1050.551891] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a450e6-3b83-6254-222c-c9648bf2e49c" [ 1050.551891] env[62585]: _type = "Task" [ 1050.551891] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.559672] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a450e6-3b83-6254-222c-c9648bf2e49c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.641661] env[62585]: DEBUG oslo_concurrency.lockutils [None req-65dafa26-fea2-4bb2-93ec-4b07a2bc1252 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.641832] env[62585]: DEBUG oslo_concurrency.lockutils [None req-65dafa26-fea2-4bb2-93ec-4b07a2bc1252 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquired lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.642047] env[62585]: DEBUG nova.network.neutron [None req-65dafa26-fea2-4bb2-93ec-4b07a2bc1252 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1050.642246] env[62585]: DEBUG nova.objects.instance [None req-65dafa26-fea2-4bb2-93ec-4b07a2bc1252 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lazy-loading 'info_cache' on Instance uuid bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.062273] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52a450e6-3b83-6254-222c-c9648bf2e49c, 'name': SearchDatastore_Task, 'duration_secs': 0.010215} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.062538] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.062806] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] fa4c721c-3455-48c9-bbca-4b4dac29aff6/fa4c721c-3455-48c9-bbca-4b4dac29aff6.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1051.063081] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79fa4eb6-7d49-4058-8b83-c8b145bce287 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.070057] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1051.070057] env[62585]: value = "task-1385196" [ 1051.070057] env[62585]: _type = "Task" [ 1051.070057] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.076951] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385196, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.146053] env[62585]: DEBUG nova.objects.base [None req-65dafa26-fea2-4bb2-93ec-4b07a2bc1252 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62585) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1051.579906] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385196, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453685} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.580264] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] fa4c721c-3455-48c9-bbca-4b4dac29aff6/fa4c721c-3455-48c9-bbca-4b4dac29aff6.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1051.580389] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1051.580584] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b0e6a24-cb30-4923-ae45-d766fcaf7675 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.586579] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1051.586579] env[62585]: value = "task-1385197" [ 1051.586579] env[62585]: _type = "Task" [ 1051.586579] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.594077] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385197, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.876048] env[62585]: DEBUG nova.network.neutron [None req-65dafa26-fea2-4bb2-93ec-4b07a2bc1252 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Updating instance_info_cache with network_info: [{"id": "7e93d590-92de-4cbe-9262-4085c844ee88", "address": "fa:16:3e:2a:2c:ee", "network": {"id": "66030331-b20b-4f58-ac7c-9dbd68ceaf6a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-363131979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c32e1b446add43fe92f7db2dd2373f6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e93d590-92", "ovs_interfaceid": "7e93d590-92de-4cbe-9262-4085c844ee88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.099453] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385197, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063629} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.099673] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1052.100466] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39e9d5e-e343-4729-ba91-8c7a4fc4db87 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.121950] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] fa4c721c-3455-48c9-bbca-4b4dac29aff6/fa4c721c-3455-48c9-bbca-4b4dac29aff6.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1052.122211] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-728e1c59-28fb-4ea4-8fc8-4e907538c7b7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.140448] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1052.140448] env[62585]: value = "task-1385198" [ 1052.140448] env[62585]: _type = "Task" [ 1052.140448] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.148133] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385198, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.378389] env[62585]: DEBUG oslo_concurrency.lockutils [None req-65dafa26-fea2-4bb2-93ec-4b07a2bc1252 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Releasing lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.653056] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385198, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.882118] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-65dafa26-fea2-4bb2-93ec-4b07a2bc1252 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1052.882930] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8face84-d37a-46b9-9096-51288797e4a8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.890336] env[62585]: DEBUG oslo_vmware.api [None req-65dafa26-fea2-4bb2-93ec-4b07a2bc1252 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1052.890336] env[62585]: value = "task-1385199" [ 1052.890336] env[62585]: _type = "Task" [ 1052.890336] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.897966] env[62585]: DEBUG oslo_vmware.api [None req-65dafa26-fea2-4bb2-93ec-4b07a2bc1252 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385199, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.150547] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385198, 'name': ReconfigVM_Task, 'duration_secs': 0.563922} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.150817] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Reconfigured VM instance instance-0000006a to attach disk [datastore2] fa4c721c-3455-48c9-bbca-4b4dac29aff6/fa4c721c-3455-48c9-bbca-4b4dac29aff6.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1053.151455] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f60fef4a-ab6d-4c42-b523-a7271b67e8de {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.158095] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1053.158095] env[62585]: value = "task-1385200" [ 1053.158095] env[62585]: _type = "Task" [ 1053.158095] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.165367] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385200, 'name': Rename_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.399637] env[62585]: DEBUG oslo_vmware.api [None req-65dafa26-fea2-4bb2-93ec-4b07a2bc1252 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385199, 'name': PowerOnVM_Task, 'duration_secs': 0.421181} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.399850] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-65dafa26-fea2-4bb2-93ec-4b07a2bc1252 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1053.400027] env[62585]: DEBUG nova.compute.manager [None req-65dafa26-fea2-4bb2-93ec-4b07a2bc1252 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1053.400788] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b00b3c-530e-4317-88d1-1b00f81108af {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.667847] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385200, 'name': Rename_Task, 'duration_secs': 0.151837} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.668246] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1053.668376] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-88b17297-1b4b-45c5-a47d-1b1d67976d76 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.673830] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1053.673830] env[62585]: value = "task-1385201" [ 1053.673830] env[62585]: _type = "Task" [ 1053.673830] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.680846] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385201, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.184035] env[62585]: DEBUG oslo_vmware.api [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385201, 'name': PowerOnVM_Task, 'duration_secs': 0.434748} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.184325] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1054.184546] env[62585]: INFO nova.compute.manager [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Took 7.19 seconds to spawn the instance on the hypervisor. [ 1054.184728] env[62585]: DEBUG nova.compute.manager [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1054.185466] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a1a32d-413d-42a2-98ee-133ab3dffd37 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.702180] env[62585]: INFO nova.compute.manager [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Took 11.88 seconds to build instance. [ 1054.973842] env[62585]: DEBUG nova.compute.manager [req-5cf3c061-e098-43f8-b282-c8193240a508 req-503ba4db-cbcd-4977-879a-247aeaf91182 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Received event network-changed-2f56c908-7d1b-4833-88be-c915dc3e5385 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1054.974076] env[62585]: DEBUG nova.compute.manager [req-5cf3c061-e098-43f8-b282-c8193240a508 req-503ba4db-cbcd-4977-879a-247aeaf91182 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Refreshing instance network info cache due to event network-changed-2f56c908-7d1b-4833-88be-c915dc3e5385. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1054.974310] env[62585]: DEBUG oslo_concurrency.lockutils [req-5cf3c061-e098-43f8-b282-c8193240a508 req-503ba4db-cbcd-4977-879a-247aeaf91182 service nova] Acquiring lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.974459] env[62585]: DEBUG oslo_concurrency.lockutils [req-5cf3c061-e098-43f8-b282-c8193240a508 req-503ba4db-cbcd-4977-879a-247aeaf91182 service nova] Acquired lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.974622] env[62585]: DEBUG nova.network.neutron [req-5cf3c061-e098-43f8-b282-c8193240a508 req-503ba4db-cbcd-4977-879a-247aeaf91182 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Refreshing network info cache for port 2f56c908-7d1b-4833-88be-c915dc3e5385 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1055.203526] env[62585]: DEBUG oslo_concurrency.lockutils [None req-1d69edfc-0e3c-4b10-b26f-ba88d6843831 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.391s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.691170] env[62585]: DEBUG nova.network.neutron [req-5cf3c061-e098-43f8-b282-c8193240a508 req-503ba4db-cbcd-4977-879a-247aeaf91182 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Updated VIF entry in instance network info cache for port 2f56c908-7d1b-4833-88be-c915dc3e5385. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1055.691568] env[62585]: DEBUG nova.network.neutron [req-5cf3c061-e098-43f8-b282-c8193240a508 req-503ba4db-cbcd-4977-879a-247aeaf91182 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Updating instance_info_cache with network_info: [{"id": "2f56c908-7d1b-4833-88be-c915dc3e5385", "address": "fa:16:3e:97:bd:86", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f56c908-7d", "ovs_interfaceid": "2f56c908-7d1b-4833-88be-c915dc3e5385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.194844] env[62585]: DEBUG oslo_concurrency.lockutils [req-5cf3c061-e098-43f8-b282-c8193240a508 req-503ba4db-cbcd-4977-879a-247aeaf91182 service nova] Releasing lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1060.063063] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1060.063424] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1060.063485] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1060.063620] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1060.063759] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62585) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1060.063937] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1061.561529] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1061.562013] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1062.064556] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.064803] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.064972] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.065142] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62585) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1062.066037] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2be229c-c31b-430e-9b0a-73381523cd21 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.074692] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29e2dfc-4f53-4296-a554-a6b75782c08d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.088608] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f77e7d6-051e-48d0-9751-8df20caa1e2b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.095086] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8664a55-988d-4099-8d2b-c75e44374387 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.123094] env[62585]: DEBUG nova.compute.resource_tracker [None 
req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180558MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=62585) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1062.123337] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.123476] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.151765] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1063.152126] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance d8955c26-85d1-481c-b1d2-4879bb52158b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1063.152126] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance c31a584a-3dfb-4ec2-8852-e9e27cafcb2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1063.152189] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance fa4c721c-3455-48c9-bbca-4b4dac29aff6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1063.152475] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1063.152666] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1063.207673] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f237b9fb-0105-4f27-b3b8-9b6690422ec9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.215101] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89c21a4-dfb9-4d24-99ee-0e1580cbc2f9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.243970] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579b8ca1-68f4-4836-8294-d25ef64999f0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.250930] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40022cf6-a0cc-4c0f-a701-3d5709cdeb0f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.263510] env[62585]: DEBUG nova.compute.provider_tree [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.766626] env[62585]: DEBUG nova.scheduler.client.report [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1064.271210] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62585) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1064.271593] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.148s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.772425] env[62585]: DEBUG oslo_service.periodic_task [None 
req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1064.772593] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Starting heal instance info cache {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1064.772714] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Rebuilding the list of instances to heal {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1065.304699] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1065.304907] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquired lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.305044] env[62585]: DEBUG nova.network.neutron [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Forcefully refreshing network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1065.305228] env[62585]: DEBUG nova.objects.instance [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lazy-loading 'info_cache' on Instance uuid bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.032551] env[62585]: DEBUG nova.network.neutron [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Updating instance_info_cache with network_info: [{"id": "7e93d590-92de-4cbe-9262-4085c844ee88", "address": "fa:16:3e:2a:2c:ee", "network": {"id": "66030331-b20b-4f58-ac7c-9dbd68ceaf6a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-363131979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c32e1b446add43fe92f7db2dd2373f6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e93d590-92", "ovs_interfaceid": "7e93d590-92de-4cbe-9262-4085c844ee88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.534994] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Releasing lock 
"refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1067.535176] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Updated the network info_cache for instance {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1067.535392] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1067.535549] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1067.535724] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1067.535849] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Cleaning up deleted instances with incomplete migration {{(pid=62585) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1068.566521] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1069.072043] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1069.072233] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Cleaning up deleted instances {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1069.582814] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] There are 34 instances to clean {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 1069.583179] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 167b0fe3-d6e0-4249-90ab-7b1181669828] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1070.086692] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 92b90694-2bb1-431c-b2c0-ad2f229f4a75] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1070.589537] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 6b39ab02-7c61-4d7f-b8ef-d9eebe0cef08] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1071.093158] env[62585]: DEBUG 
nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 474d033c-5bf2-4b6a-95be-f865e8f5dfc9] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1071.596344] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 0d256aa0-a873-4ff1-8c43-464d8b2d03a8] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1072.099618] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 8d84e240-2dc3-4680-9ee7-b705d4e7749a] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1072.603118] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: b7b8338a-2e9f-4854-8f4d-ede21b150317] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1073.106748] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: b0885bdd-bc8d-4311-8388-54bdc22144c2] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1073.610621] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 65ed4088-2cc5-4c00-94af-f714ec608fd8] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1074.114564] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: a26fb190-e6e6-48ab-a1d6-c662421a965f] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1074.617650] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: c8f23f36-b035-467e-959a-37fc0b6462ad] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1075.120640] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: e4edc1dd-52ea-428e-832a-b49d3bc4fe14] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1075.623620] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 54f542b5-3aba-49d6-a487-62714416b86f] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1076.126710] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: b6186aef-8f4c-409a-83aa-1548545ea7c4] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1076.629650] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 3abb84ea-b613-4956-a64f-c4ad230343c2] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1077.133199] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 01941b61-1960-4360-9dd0-513d5597bc70] 
Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1077.636839] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: fcbbc06c-71fa-4891-8bfc-0de746b9e622] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1078.140457] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 2cf85b78-df04-40d0-a7db-5e8979574d0a] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1078.643454] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 4b080cc3-e1cc-4b64-9926-c37b891444f5] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1079.146462] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 891e5a42-3681-47eb-ac88-015fa21a6580] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1079.649920] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: ddb1103d-a846-4229-b441-de45424b4ec9] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1080.153959] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 95de3c81-b764-4594-af86-66df7814d7aa] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1080.658315] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 62e3b57b-6c9c-4f3c-8a47-efb5fbed801f] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1081.162091] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: d96a04d7-b07f-439d-aafa-09dc70a4d1a7] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1081.666642] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: b2d2a012-a62f-4237-95c3-d7153d6b223c] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1082.170023] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 70ac6289-2f14-4fb0-a811-97d76cafc532] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1082.673828] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: a634a80e-d90a-4ce3-8233-75657a7754be] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1083.176636] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: d2c6418c-b070-4c46-824b-18638e9b569f] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 
1083.679734] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: d644c700-c5d1-4549-b73b-0573f268dc40] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1084.183688] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 67e5af2f-4eec-41ec-916f-9f9b77596943] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1084.686851] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 679380d4-5b96-4c30-bac9-f7163f19c609] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1085.191054] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: abf4a205-fcee-46e4-85b6-10a452cc0312] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1085.694239] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 6057e13b-71df-458d-b6ed-c139a8c57836] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1086.198379] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: 8763a058-b453-4f03-9532-7d7e65efdfb2] Instance has had 0 of 5 cleanup attempts {{(pid=62585) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1090.535222] env[62585]: DEBUG oslo_concurrency.lockutils [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.535639] env[62585]: DEBUG oslo_concurrency.lockutils [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.039642] env[62585]: INFO nova.compute.manager [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Detaching volume 38c07403-58dd-40d1-bbb3-a38edf442862 [ 1091.069877] env[62585]: INFO nova.virt.block_device [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Attempting to driver detach volume 38c07403-58dd-40d1-bbb3-a38edf442862 from mountpoint /dev/sdb [ 1091.070119] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Volume detach. 
Driver type: vmdk {{(pid=62585) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1091.070325] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294083', 'volume_id': '38c07403-58dd-40d1-bbb3-a38edf442862', 'name': 'volume-38c07403-58dd-40d1-bbb3-a38edf442862', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bb076a4e-eb38-4d0c-bdea-f8ebb46d7968', 'attached_at': '', 'detached_at': '', 'volume_id': '38c07403-58dd-40d1-bbb3-a38edf442862', 'serial': '38c07403-58dd-40d1-bbb3-a38edf442862'} {{(pid=62585) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1091.071208] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fceb5d0a-12c7-469a-b9e6-15b2892afe84 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.092321] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc25993b-5578-4315-82d7-2da8fc4d8a7b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.098643] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a536ca5-32f2-4466-859b-a8426d940aee {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.117923] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d950c8d-83cf-44e5-92c0-0b4ea2ab9f13 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.131487] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] The volume has not been displaced from its original location: [datastore2] volume-38c07403-58dd-40d1-bbb3-a38edf442862/volume-38c07403-58dd-40d1-bbb3-a38edf442862.vmdk. No consolidation needed. 
{{(pid=62585) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1091.136676] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Reconfiguring VM instance instance-00000064 to detach disk 2001 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1091.136928] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee08c072-40e3-4abc-bb7c-da7b5bd7a5e9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.153959] env[62585]: DEBUG oslo_vmware.api [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1091.153959] env[62585]: value = "task-1385202" [ 1091.153959] env[62585]: _type = "Task" [ 1091.153959] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.160990] env[62585]: DEBUG oslo_vmware.api [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385202, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.662883] env[62585]: DEBUG oslo_vmware.api [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385202, 'name': ReconfigVM_Task, 'duration_secs': 0.253767} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.663286] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Reconfigured VM instance instance-00000064 to detach disk 2001 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1091.667724] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-059ebb73-28e7-4ae8-b9ec-c1f303dc4310 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.681546] env[62585]: DEBUG oslo_vmware.api [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1091.681546] env[62585]: value = "task-1385203" [ 1091.681546] env[62585]: _type = "Task" [ 1091.681546] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.688688] env[62585]: DEBUG oslo_vmware.api [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385203, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.849697] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.849945] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.191154] env[62585]: DEBUG oslo_vmware.api [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385203, 'name': ReconfigVM_Task, 'duration_secs': 0.121314} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.191467] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294083', 'volume_id': '38c07403-58dd-40d1-bbb3-a38edf442862', 'name': 'volume-38c07403-58dd-40d1-bbb3-a38edf442862', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bb076a4e-eb38-4d0c-bdea-f8ebb46d7968', 'attached_at': '', 'detached_at': '', 'volume_id': '38c07403-58dd-40d1-bbb3-a38edf442862', 'serial': '38c07403-58dd-40d1-bbb3-a38edf442862'} {{(pid=62585) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1092.353051] env[62585]: DEBUG nova.compute.utils [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1092.731074] env[62585]: DEBUG nova.objects.instance [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lazy-loading 'flavor' on Instance uuid bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1092.855891] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.687523] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b4518f48-e2cf-4251-9158-5a145d85875f 
tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.738162] env[62585]: DEBUG oslo_concurrency.lockutils [None req-67c0bbf3-c089-404f-afb7-dafa9c61df27 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.203s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.739588] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b4518f48-e2cf-4251-9158-5a145d85875f tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.052s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.739754] env[62585]: DEBUG nova.compute.manager [None req-b4518f48-e2cf-4251-9158-5a145d85875f tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1093.740778] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f11f2d-fe9f-4037-a9df-05f1f22f5f1d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.747423] env[62585]: DEBUG nova.compute.manager [None req-b4518f48-e2cf-4251-9158-5a145d85875f tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62585) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1093.747956] env[62585]: DEBUG nova.objects.instance [None req-b4518f48-e2cf-4251-9158-5a145d85875f tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lazy-loading 'flavor' on Instance uuid bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.908024] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.908308] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.908561] env[62585]: INFO nova.compute.manager [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Attaching volume 0049076e-e184-464d-bb32-a8231468072e to /dev/sdb [ 1093.938492] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b8944e-18f6-4caf-a27e-7aa9b963cf59 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.945779] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b70d8af-7cc0-41e2-bbaf-fefbd2aea923 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.959414] env[62585]: DEBUG nova.virt.block_device [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Updating existing volume attachment record: c4c17b27-079e-41cd-9844-6688b09df613 {{(pid=62585) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1094.253476] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4518f48-e2cf-4251-9158-5a145d85875f tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1094.253885] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0871eb6f-f245-4f0c-ad89-c39cf55334a2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.260484] env[62585]: DEBUG oslo_vmware.api [None req-b4518f48-e2cf-4251-9158-5a145d85875f tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1094.260484] env[62585]: value = "task-1385207" [ 1094.260484] env[62585]: _type = "Task" [ 1094.260484] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.269177] env[62585]: DEBUG oslo_vmware.api [None req-b4518f48-e2cf-4251-9158-5a145d85875f tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385207, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.772407] env[62585]: DEBUG oslo_vmware.api [None req-b4518f48-e2cf-4251-9158-5a145d85875f tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385207, 'name': PowerOffVM_Task, 'duration_secs': 0.187443} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.772769] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4518f48-e2cf-4251-9158-5a145d85875f tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1094.772809] env[62585]: DEBUG nova.compute.manager [None req-b4518f48-e2cf-4251-9158-5a145d85875f tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1094.773565] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bdda74e-d426-490c-b993-c6c504f2a8db {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.285347] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b4518f48-e2cf-4251-9158-5a145d85875f tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.546s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.097787] env[62585]: DEBUG nova.objects.instance [None req-e03f5f81-b7a5-4ca6-8d79-3bf0cd5fc378 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lazy-loading 'flavor' on Instance uuid bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1096.603258] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e03f5f81-b7a5-4ca6-8d79-3bf0cd5fc378 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.603469] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e03f5f81-b7a5-4ca6-8d79-3bf0cd5fc378 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquired lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.603684] env[62585]: DEBUG nova.network.neutron [None req-e03f5f81-b7a5-4ca6-8d79-3bf0cd5fc378 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1096.603905] env[62585]: DEBUG nova.objects.instance [None req-e03f5f81-b7a5-4ca6-8d79-3bf0cd5fc378 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lazy-loading 'info_cache' on Instance uuid bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1097.107528] env[62585]: DEBUG nova.objects.base [None req-e03f5f81-b7a5-4ca6-8d79-3bf0cd5fc378 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Object 
Instance lazy-loaded attributes: flavor,info_cache {{(pid=62585) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1097.809035] env[62585]: DEBUG nova.network.neutron [None req-e03f5f81-b7a5-4ca6-8d79-3bf0cd5fc378 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Updating instance_info_cache with network_info: [{"id": "7e93d590-92de-4cbe-9262-4085c844ee88", "address": "fa:16:3e:2a:2c:ee", "network": {"id": "66030331-b20b-4f58-ac7c-9dbd68ceaf6a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-363131979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c32e1b446add43fe92f7db2dd2373f6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e93d590-92", "ovs_interfaceid": "7e93d590-92de-4cbe-9262-4085c844ee88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.312188] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e03f5f81-b7a5-4ca6-8d79-3bf0cd5fc378 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Releasing lock "refresh_cache-bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.504141] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Volume attach. 
Driver type: vmdk {{(pid=62585) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1098.504412] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294086', 'volume_id': '0049076e-e184-464d-bb32-a8231468072e', 'name': 'volume-0049076e-e184-464d-bb32-a8231468072e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fa4c721c-3455-48c9-bbca-4b4dac29aff6', 'attached_at': '', 'detached_at': '', 'volume_id': '0049076e-e184-464d-bb32-a8231468072e', 'serial': '0049076e-e184-464d-bb32-a8231468072e'} {{(pid=62585) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1098.505311] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9c00f1-8e22-411c-82b8-db47b2766cfa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.521649] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25635683-94fd-479b-8357-c5025b7fb9b2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.546079] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] volume-0049076e-e184-464d-bb32-a8231468072e/volume-0049076e-e184-464d-bb32-a8231468072e.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1098.546337] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8c024e5-ba95-474e-af59-3f08ee7476ea {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.564524] env[62585]: DEBUG oslo_vmware.api [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1098.564524] env[62585]: value = "task-1385209" [ 1098.564524] env[62585]: _type = "Task" [ 1098.564524] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.575282] env[62585]: DEBUG oslo_vmware.api [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385209, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.815370] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-e03f5f81-b7a5-4ca6-8d79-3bf0cd5fc378 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1098.815664] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-05ce0b3b-d9c5-425b-bcc4-47cad050a0c9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.823257] env[62585]: DEBUG oslo_vmware.api [None req-e03f5f81-b7a5-4ca6-8d79-3bf0cd5fc378 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1098.823257] env[62585]: value = "task-1385210" [ 1098.823257] env[62585]: _type = "Task" [ 1098.823257] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.830890] env[62585]: DEBUG oslo_vmware.api [None req-e03f5f81-b7a5-4ca6-8d79-3bf0cd5fc378 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385210, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.074659] env[62585]: DEBUG oslo_vmware.api [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385209, 'name': ReconfigVM_Task, 'duration_secs': 0.316357} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.074894] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Reconfigured VM instance instance-0000006a to attach disk [datastore2] volume-0049076e-e184-464d-bb32-a8231468072e/volume-0049076e-e184-464d-bb32-a8231468072e.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1099.079488] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24fddbc4-982a-4d06-b5b3-7e4e774c5281 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.094252] env[62585]: DEBUG oslo_vmware.api [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1099.094252] env[62585]: value = "task-1385211" [ 1099.094252] env[62585]: _type = "Task" [ 1099.094252] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.102279] env[62585]: DEBUG oslo_vmware.api [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385211, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.332849] env[62585]: DEBUG oslo_vmware.api [None req-e03f5f81-b7a5-4ca6-8d79-3bf0cd5fc378 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385210, 'name': PowerOnVM_Task, 'duration_secs': 0.387836} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.333322] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-e03f5f81-b7a5-4ca6-8d79-3bf0cd5fc378 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1099.333322] env[62585]: DEBUG nova.compute.manager [None req-e03f5f81-b7a5-4ca6-8d79-3bf0cd5fc378 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1099.334082] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33607be1-fea5-4503-af66-5c8ad40d4746 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.604611] env[62585]: DEBUG oslo_vmware.api [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385211, 'name': ReconfigVM_Task, 'duration_secs': 0.135971} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.604932] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294086', 'volume_id': '0049076e-e184-464d-bb32-a8231468072e', 'name': 'volume-0049076e-e184-464d-bb32-a8231468072e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fa4c721c-3455-48c9-bbca-4b4dac29aff6', 'attached_at': '', 'detached_at': '', 'volume_id': '0049076e-e184-464d-bb32-a8231468072e', 'serial': '0049076e-e184-464d-bb32-a8231468072e'} {{(pid=62585) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1100.640662] env[62585]: DEBUG nova.objects.instance [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lazy-loading 'flavor' on Instance uuid fa4c721c-3455-48c9-bbca-4b4dac29aff6 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1101.145457] env[62585]: DEBUG oslo_concurrency.lockutils [None req-c6aa9747-7878-4012-b257-bbf52d5ebcc0 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.237s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.259636] env[62585]: INFO nova.compute.manager [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Rescuing [ 1101.259914] env[62585]: DEBUG oslo_concurrency.lockutils [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1101.260155] env[62585]: DEBUG oslo_concurrency.lockutils [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.260333] env[62585]: DEBUG nova.network.neutron [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1101.963612] env[62585]: DEBUG nova.network.neutron [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Updating instance_info_cache with network_info: [{"id": "2f56c908-7d1b-4833-88be-c915dc3e5385", "address": "fa:16:3e:97:bd:86", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f56c908-7d", "ovs_interfaceid": "2f56c908-7d1b-4833-88be-c915dc3e5385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.467065] env[62585]: DEBUG oslo_concurrency.lockutils [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.997946] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1102.998377] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49f34cc3-d8e5-4612-8238-75eca110e166 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.007035] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1103.007035] env[62585]: value = "task-1385212" [ 1103.007035] env[62585]: _type = "Task" [ 1103.007035] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.017122] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385212, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.517095] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385212, 'name': PowerOffVM_Task, 'duration_secs': 0.196063} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.517375] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1103.518157] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebffb81-2894-4406-8216-10dd8d3374ab {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.538853] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56cfb218-3bcb-44c0-ab6d-7da044e01dee {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.564936] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1103.565255] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f1bb3b9-916a-41a8-b97e-c4a4495fcccc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.571112] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1103.571112] env[62585]: value = "task-1385213" [ 1103.571112] env[62585]: _type = "Task" [ 1103.571112] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.578450] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385213, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.082135] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] VM already powered off {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1104.082135] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1104.082619] env[62585]: DEBUG oslo_concurrency.lockutils [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.082619] env[62585]: DEBUG oslo_concurrency.lockutils [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.082717] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1104.083035] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f156596-11fb-4d2c-bc1a-f237a560f329 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.091135] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1104.091315] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1104.092025] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f051733-394c-4dc1-a3a1-fcb3210d8303 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.096944] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1104.096944] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f209a1-6956-e7e7-abcd-fd1751d22ba5" [ 1104.096944] env[62585]: _type = "Task" [ 1104.096944] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.104168] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f209a1-6956-e7e7-abcd-fd1751d22ba5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.607501] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52f209a1-6956-e7e7-abcd-fd1751d22ba5, 'name': SearchDatastore_Task, 'duration_secs': 0.008058} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.608271] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f86bf07c-3e01-4c45-b6f3-ff5bc743e704 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.613246] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1104.613246] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5205c3ee-d42f-69b7-50fd-c914dbe1cce8" [ 1104.613246] env[62585]: _type = "Task" [ 1104.613246] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.620389] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5205c3ee-d42f-69b7-50fd-c914dbe1cce8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.127607] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5205c3ee-d42f-69b7-50fd-c914dbe1cce8, 'name': SearchDatastore_Task, 'duration_secs': 0.008349} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.127966] env[62585]: DEBUG oslo_concurrency.lockutils [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.128305] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] fa4c721c-3455-48c9-bbca-4b4dac29aff6/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk. {{(pid=62585) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1105.128644] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39070c2e-00bb-40a9-9b9e-0ecb389eda21 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.136414] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1105.136414] env[62585]: value = "task-1385214" [ 1105.136414] env[62585]: _type = "Task" [ 1105.136414] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.144481] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385214, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.646117] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385214, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.444149} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.646386] env[62585]: INFO nova.virt.vmwareapi.ds_util [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore2] fa4c721c-3455-48c9-bbca-4b4dac29aff6/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk. 
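The rescue preparation above locates the cached image with SearchDatastore_Task and then copies it to a per-instance "<image-id>-rescue.vmdk" with CopyVirtualDisk_Task, polling the task until it reports completed successfully (the real code path, per the log, is nova.virt.vmwareapi.ds_util.disk_copy). Below is a minimal illustrative sketch of that copy-and-wait pattern using oslo.vmware, not Nova's implementation; the vCenter host, credentials, retry/poll values, datacenter reference and datastore paths are placeholders, not values from this log.

# Minimal sketch of the CopyVirtualDisk_Task + task-wait sequence shown above.
# Host, credentials, api_retry_count, task_poll_interval, dc_ref and the
# datastore paths are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession('vc.example.org', 'admin', 'secret',
                               10,    # api_retry_count (placeholder)
                               0.5)   # task_poll_interval (placeholder)

src = '[datastore2] devstack-image-cache_base/IMAGE_ID/IMAGE_ID.vmdk'
dst = '[datastore2] INSTANCE_UUID/IMAGE_ID-rescue.vmdk'
dc_ref = None  # placeholder: a real call needs the Datacenter managed-object ref

# CopyVirtualDisk_Task is invoked against vCenter's virtualDiskManager and
# returns a task reference.
task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                          session.vim.service_content.virtualDiskManager,
                          sourceName=src, sourceDatacenter=dc_ref,
                          destName=dst, destDatacenter=dc_ref)

# wait_for_task() drives the polling that produces the "_poll_task ...
# progress is N%" and "completed successfully" lines above; it raises if the
# task ends in an error state.
session.wait_for_task(task)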
[ 1105.647182] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d835778-7ecb-4403-9e6b-8d69afc31020 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.674204] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] fa4c721c-3455-48c9-bbca-4b4dac29aff6/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1105.674470] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e00b5ecd-4da9-4bab-a6bc-b8af76d1db4b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.693094] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1105.693094] env[62585]: value = "task-1385215" [ 1105.693094] env[62585]: _type = "Task" [ 1105.693094] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.700755] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385215, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.202987] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385215, 'name': ReconfigVM_Task, 'duration_secs': 0.287171} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.203483] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Reconfigured VM instance instance-0000006a to attach disk [datastore2] fa4c721c-3455-48c9-bbca-4b4dac29aff6/790c072e-fdf9-43ec-b7a5-3b21a2eaee40-rescue.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1106.204136] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1595454-1b9a-430b-91e6-e4bfe3d90ec4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.230363] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16a4f6a4-84c3-4cf9-871f-730c8dedf89e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.245921] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1106.245921] env[62585]: value = "task-1385216" [ 1106.245921] env[62585]: _type = "Task" [ 1106.245921] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.254470] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385216, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.755727] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385216, 'name': ReconfigVM_Task, 'duration_secs': 0.207257} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.755945] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1106.756217] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e732bfa-548d-4f13-bfae-757147744e0f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.762744] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1106.762744] env[62585]: value = "task-1385217" [ 1106.762744] env[62585]: _type = "Task" [ 1106.762744] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.769930] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385217, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.273170] env[62585]: DEBUG oslo_vmware.api [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385217, 'name': PowerOnVM_Task, 'duration_secs': 0.399684} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.273497] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1107.276230] env[62585]: DEBUG nova.compute.manager [None req-93ec9807-6633-4289-ba8a-780d4098d651 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1107.276975] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a781e9e-682c-4b00-9577-089171ebfb72 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.282752] env[62585]: INFO nova.compute.manager [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Unrescuing [ 1108.283190] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.283300] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquired lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.283404] env[62585]: DEBUG nova.network.neutron [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1109.041355] env[62585]: DEBUG nova.network.neutron [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: 
fa4c721c-3455-48c9-bbca-4b4dac29aff6] Updating instance_info_cache with network_info: [{"id": "2f56c908-7d1b-4833-88be-c915dc3e5385", "address": "fa:16:3e:97:bd:86", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f56c908-7d", "ovs_interfaceid": "2f56c908-7d1b-4833-88be-c915dc3e5385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.544262] env[62585]: DEBUG oslo_concurrency.lockutils [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Releasing lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1109.544980] env[62585]: DEBUG nova.objects.instance [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lazy-loading 'flavor' on Instance uuid fa4c721c-3455-48c9-bbca-4b4dac29aff6 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1110.050679] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b34b21-ba9a-4257-b7eb-47c88532f737 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.074129] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1110.074462] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db31cbf3-c537-4eec-9805-7a7a11711b8e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.080980] env[62585]: DEBUG oslo_vmware.api [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1110.080980] env[62585]: value = "task-1385218" [ 1110.080980] env[62585]: _type = "Task" [ 1110.080980] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.088397] env[62585]: DEBUG oslo_vmware.api [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385218, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.591432] env[62585]: DEBUG oslo_vmware.api [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385218, 'name': PowerOffVM_Task, 'duration_secs': 0.213865} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.591839] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1110.596823] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Reconfiguring VM instance instance-0000006a to detach disk 2002 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1110.597096] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cf68d30-719a-4e01-a859-65081ffcd5cf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.613828] env[62585]: DEBUG oslo_vmware.api [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1110.613828] env[62585]: value = "task-1385219" [ 1110.613828] env[62585]: _type = "Task" [ 1110.613828] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.620986] env[62585]: DEBUG oslo_vmware.api [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385219, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.123893] env[62585]: DEBUG oslo_vmware.api [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385219, 'name': ReconfigVM_Task, 'duration_secs': 0.204422} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.124144] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Reconfigured VM instance instance-0000006a to detach disk 2002 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1111.124343] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1111.124584] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ad8b7f3-0619-455a-9311-4574e5965727 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.130446] env[62585]: DEBUG oslo_vmware.api [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1111.130446] env[62585]: value = "task-1385220" [ 1111.130446] env[62585]: _type = "Task" [ 1111.130446] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.138836] env[62585]: DEBUG oslo_vmware.api [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385220, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.641398] env[62585]: DEBUG oslo_vmware.api [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385220, 'name': PowerOnVM_Task, 'duration_secs': 0.388405} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.641821] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1111.642213] env[62585]: DEBUG nova.compute.manager [None req-9f6f96ff-5f41-436e-8840-7b10ab904d38 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1111.643203] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47738f7b-ca38-4497-b6ac-f4ca724af263 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.753712] env[62585]: DEBUG nova.compute.manager [req-71cc243b-a7af-48ae-b8b3-6f3190ab8df3 req-1e956f63-1bd5-40ce-abe0-8af1e2277a77 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Received event network-changed-2f56c908-7d1b-4833-88be-c915dc3e5385 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1112.753977] env[62585]: DEBUG nova.compute.manager [req-71cc243b-a7af-48ae-b8b3-6f3190ab8df3 req-1e956f63-1bd5-40ce-abe0-8af1e2277a77 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Refreshing instance network info cache due to event network-changed-2f56c908-7d1b-4833-88be-c915dc3e5385. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1112.754269] env[62585]: DEBUG oslo_concurrency.lockutils [req-71cc243b-a7af-48ae-b8b3-6f3190ab8df3 req-1e956f63-1bd5-40ce-abe0-8af1e2277a77 service nova] Acquiring lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1112.754425] env[62585]: DEBUG oslo_concurrency.lockutils [req-71cc243b-a7af-48ae-b8b3-6f3190ab8df3 req-1e956f63-1bd5-40ce-abe0-8af1e2277a77 service nova] Acquired lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.754590] env[62585]: DEBUG nova.network.neutron [req-71cc243b-a7af-48ae-b8b3-6f3190ab8df3 req-1e956f63-1bd5-40ce-abe0-8af1e2277a77 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Refreshing network info cache for port 2f56c908-7d1b-4833-88be-c915dc3e5385 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1113.475023] env[62585]: DEBUG nova.network.neutron [req-71cc243b-a7af-48ae-b8b3-6f3190ab8df3 req-1e956f63-1bd5-40ce-abe0-8af1e2277a77 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Updated VIF entry in instance network info cache for port 2f56c908-7d1b-4833-88be-c915dc3e5385. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1113.475420] env[62585]: DEBUG nova.network.neutron [req-71cc243b-a7af-48ae-b8b3-6f3190ab8df3 req-1e956f63-1bd5-40ce-abe0-8af1e2277a77 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Updating instance_info_cache with network_info: [{"id": "2f56c908-7d1b-4833-88be-c915dc3e5385", "address": "fa:16:3e:97:bd:86", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f56c908-7d", "ovs_interfaceid": "2f56c908-7d1b-4833-88be-c915dc3e5385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.978303] env[62585]: DEBUG oslo_concurrency.lockutils [req-71cc243b-a7af-48ae-b8b3-6f3190ab8df3 req-1e956f63-1bd5-40ce-abe0-8af1e2277a77 service nova] Releasing lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1114.781014] env[62585]: DEBUG nova.compute.manager [req-e7afc7b3-71d8-4acf-8660-e99f905acfdd req-f595ade5-000c-4c3c-9141-7e8ea516afc1 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Received event network-changed-2f56c908-7d1b-4833-88be-c915dc3e5385 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1114.781190] env[62585]: DEBUG nova.compute.manager [req-e7afc7b3-71d8-4acf-8660-e99f905acfdd req-f595ade5-000c-4c3c-9141-7e8ea516afc1 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Refreshing instance network info cache due to event network-changed-2f56c908-7d1b-4833-88be-c915dc3e5385. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1114.781440] env[62585]: DEBUG oslo_concurrency.lockutils [req-e7afc7b3-71d8-4acf-8660-e99f905acfdd req-f595ade5-000c-4c3c-9141-7e8ea516afc1 service nova] Acquiring lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1114.781615] env[62585]: DEBUG oslo_concurrency.lockutils [req-e7afc7b3-71d8-4acf-8660-e99f905acfdd req-f595ade5-000c-4c3c-9141-7e8ea516afc1 service nova] Acquired lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.781786] env[62585]: DEBUG nova.network.neutron [req-e7afc7b3-71d8-4acf-8660-e99f905acfdd req-f595ade5-000c-4c3c-9141-7e8ea516afc1 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Refreshing network info cache for port 2f56c908-7d1b-4833-88be-c915dc3e5385 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1115.478010] env[62585]: DEBUG nova.network.neutron [req-e7afc7b3-71d8-4acf-8660-e99f905acfdd req-f595ade5-000c-4c3c-9141-7e8ea516afc1 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Updated VIF entry in instance network info cache for port 2f56c908-7d1b-4833-88be-c915dc3e5385. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1115.478396] env[62585]: DEBUG nova.network.neutron [req-e7afc7b3-71d8-4acf-8660-e99f905acfdd req-f595ade5-000c-4c3c-9141-7e8ea516afc1 service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Updating instance_info_cache with network_info: [{"id": "2f56c908-7d1b-4833-88be-c915dc3e5385", "address": "fa:16:3e:97:bd:86", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f56c908-7d", "ovs_interfaceid": "2f56c908-7d1b-4833-88be-c915dc3e5385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.980968] env[62585]: DEBUG oslo_concurrency.lockutils [req-e7afc7b3-71d8-4acf-8660-e99f905acfdd req-f595ade5-000c-4c3c-9141-7e8ea516afc1 service nova] Releasing lock "refresh_cache-fa4c721c-3455-48c9-bbca-4b4dac29aff6" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1130.225360] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running 
periodic task ComputeManager._check_instance_build_time {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.225714] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.225754] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Starting heal instance info cache {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1130.756510] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "refresh_cache-d8955c26-85d1-481c-b1d2-4879bb52158b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1130.756660] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquired lock "refresh_cache-d8955c26-85d1-481c-b1d2-4879bb52158b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.756811] env[62585]: DEBUG nova.network.neutron [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Forcefully refreshing network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1131.949493] env[62585]: DEBUG nova.network.neutron [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Updating instance_info_cache with network_info: [{"id": "be0d6bab-1253-458c-b3cd-71ed0eb87c2c", "address": "fa:16:3e:37:e6:f2", "network": {"id": "297bb16b-ed70-4c91-adf6-de95678b32a3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2009077866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c48f11ec1fa84b4a96ef72198fcec3ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35342bcb-8b06-472e-b3c0-43fd3d6c4b30", "external-id": "nsx-vlan-transportzone-524", "segmentation_id": 524, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe0d6bab-12", "ovs_interfaceid": "be0d6bab-1253-458c-b3cd-71ed0eb87c2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.452245] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Releasing lock "refresh_cache-d8955c26-85d1-481c-b1d2-4879bb52158b" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.452526] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Updated the network 
info_cache for instance {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1132.452745] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.452901] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.453061] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.453220] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.453386] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.453536] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.957716] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Getting list of instances from cluster (obj){ [ 1132.957716] env[62585]: value = "domain-c8" [ 1132.957716] env[62585]: _type = "ClusterComputeResource" [ 1132.957716] env[62585]: } {{(pid=62585) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1132.958784] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549a3c54-583e-4599-b6a9-586355518e00 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.971576] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Got total of 4 instances {{(pid=62585) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1132.971745] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Triggering sync for uuid bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 {{(pid=62585) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1132.971933] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Triggering sync for uuid d8955c26-85d1-481c-b1d2-4879bb52158b {{(pid=62585) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1132.972103] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Triggering sync for uuid c31a584a-3dfb-4ec2-8852-e9e27cafcb2d {{(pid=62585) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1132.972256] 
env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Triggering sync for uuid fa4c721c-3455-48c9-bbca-4b4dac29aff6 {{(pid=62585) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1132.972622] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.972850] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.973144] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "d8955c26-85d1-481c-b1d2-4879bb52158b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.973405] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "d8955c26-85d1-481c-b1d2-4879bb52158b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.973980] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.973980] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.974178] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.974371] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.974575] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62585) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.974708] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62585) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1132.975421] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65520531-6935-4b63-80df-b2f56806ccb6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.978329] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd726bc-22a3-4ece-a6e8-ab54b016c4a4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.981310] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53002222-44f6-41cf-91f5-3789b165a944 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.983907] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e616f1-9304-4456-adcb-73adeac4956c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.986098] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1133.489642] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.489898] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.490084] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.490241] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62585) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1133.491139] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac83701e-7a4a-421a-bafe-df2a3d64b1e2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.499036] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e7fd6c-398c-4a00-ab19-fbf2a0840868 
{{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.503051] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.530s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.503403] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.529s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.503726] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "d8955c26-85d1-481c-b1d2-4879bb52158b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.530s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.512985] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.539s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.513751] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d98cd77-f8e5-423e-b9d0-8aefea088aaa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.519690] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec4316aa-9dc9-484e-bd36-abbf10d05edb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.548565] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180591MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=62585) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1133.548696] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.548880] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.664643] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 actively managed on this compute host and has allocations in placement: {'resources': 
{'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.664916] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance d8955c26-85d1-481c-b1d2-4879bb52158b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.665071] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance c31a584a-3dfb-4ec2-8852-e9e27cafcb2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.666082] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance fa4c721c-3455-48c9-bbca-4b4dac29aff6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.666082] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1134.666082] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1134.683009] env[62585]: DEBUG nova.scheduler.client.report [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Refreshing inventories for resource provider 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1134.696113] env[62585]: DEBUG nova.scheduler.client.report [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Updating ProviderTree inventory for provider 66db9ec1-b5c3-45d2-a885-8e338110656b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1134.696307] env[62585]: DEBUG nova.compute.provider_tree [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Updating inventory in ProviderTree for provider 66db9ec1-b5c3-45d2-a885-8e338110656b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1134.706010] env[62585]: DEBUG nova.scheduler.client.report [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Refreshing aggregate associations for resource provider 66db9ec1-b5c3-45d2-a885-8e338110656b, aggregates: None {{(pid=62585) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1134.723531] env[62585]: DEBUG nova.scheduler.client.report [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Refreshing trait associations for resource provider 66db9ec1-b5c3-45d2-a885-8e338110656b, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62585) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1134.771507] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce3bfed0-e60b-4519-8cbe-21555e2fe0a6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.779335] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6380f9-61d7-4aa1-b2bc-e57d300a615f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.811179] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a6a1d2-d72b-41d5-9ff3-ae8d9a2240af {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.818260] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44792c2e-495c-4983-b601-20656819a00e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.833323] env[62585]: DEBUG nova.compute.provider_tree [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1135.336633] env[62585]: DEBUG nova.scheduler.client.report [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1135.337909] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62585) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1135.338109] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.789s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.672851] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.673252] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.673387] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.673560] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.673746] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.677317] env[62585]: INFO nova.compute.manager [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Terminating instance [ 1136.679163] env[62585]: DEBUG nova.compute.manager [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1136.679368] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1136.680219] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37a7134-72b8-461d-82fc-e5260b2da662 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.687606] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1136.688126] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2a6981a-90b9-4f1e-a8d5-69ae703030b2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.694363] env[62585]: DEBUG oslo_vmware.api [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1136.694363] env[62585]: value = "task-1385221" [ 1136.694363] env[62585]: _type = "Task" [ 1136.694363] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.701910] env[62585]: DEBUG oslo_vmware.api [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385221, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.204575] env[62585]: DEBUG oslo_vmware.api [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385221, 'name': PowerOffVM_Task, 'duration_secs': 0.180974} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.204870] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1137.205057] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1137.205306] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a2ad5399-8e93-4ffe-9eb9-00584c814e5a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.265609] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1137.265802] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1137.265930] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Deleting the datastore file [datastore1] bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1137.266213] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4fa376ee-7229-449a-86f1-8685fed9ab47 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.272824] env[62585]: DEBUG oslo_vmware.api [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1137.272824] env[62585]: value = "task-1385223" [ 1137.272824] env[62585]: _type = "Task" [ 1137.272824] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.280257] env[62585]: DEBUG oslo_vmware.api [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385223, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.783569] env[62585]: DEBUG oslo_vmware.api [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385223, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131829} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.783980] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1137.784055] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1137.784196] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1137.784376] env[62585]: INFO nova.compute.manager [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1137.784630] env[62585]: DEBUG oslo.service.loopingcall [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1137.784856] env[62585]: DEBUG nova.compute.manager [-] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1137.784953] env[62585]: DEBUG nova.network.neutron [-] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1138.434153] env[62585]: DEBUG nova.compute.manager [req-a12f2c88-8659-4806-90fe-10ddad4365e2 req-7cbc954d-6fbf-42c1-99cd-156acd0b65e6 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Received event network-vif-deleted-7e93d590-92de-4cbe-9262-4085c844ee88 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1138.434153] env[62585]: INFO nova.compute.manager [req-a12f2c88-8659-4806-90fe-10ddad4365e2 req-7cbc954d-6fbf-42c1-99cd-156acd0b65e6 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Neutron deleted interface 7e93d590-92de-4cbe-9262-4085c844ee88; detaching it from the instance and deleting it from the info cache [ 1138.434153] env[62585]: DEBUG nova.network.neutron [req-a12f2c88-8659-4806-90fe-10ddad4365e2 req-7cbc954d-6fbf-42c1-99cd-156acd0b65e6 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.872044] env[62585]: DEBUG nova.network.neutron [-] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.936655] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8064e5cc-6aa2-4b13-be8a-04e07cc65bf2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.946433] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03401e3a-466f-4378-bcb9-a4e8dd8ae679 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.971929] env[62585]: DEBUG nova.compute.manager [req-a12f2c88-8659-4806-90fe-10ddad4365e2 req-7cbc954d-6fbf-42c1-99cd-156acd0b65e6 service nova] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Detach interface failed, port_id=7e93d590-92de-4cbe-9262-4085c844ee88, reason: Instance bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1139.374263] env[62585]: INFO nova.compute.manager [-] [instance: bb076a4e-eb38-4d0c-bdea-f8ebb46d7968] Took 1.59 seconds to deallocate network for instance. 
[ 1139.881384] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.881684] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1139.881719] env[62585]: DEBUG nova.objects.instance [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lazy-loading 'resources' on Instance uuid bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1140.455101] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46841bb-2a67-4523-a84b-8af4cbf53237 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.462893] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f2e607-d8f1-4fab-b0e6-033ff940505b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.492376] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb214e4-8744-4418-b31f-f85c0baac3a6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.500121] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c69bb61-1e0a-4292-9800-0fd561afdf52 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.513433] env[62585]: DEBUG nova.compute.provider_tree [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1141.017485] env[62585]: DEBUG nova.scheduler.client.report [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1141.523139] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 
tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.641s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.551167] env[62585]: INFO nova.scheduler.client.report [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Deleted allocations for instance bb076a4e-eb38-4d0c-bdea-f8ebb46d7968 [ 1142.060038] env[62585]: DEBUG oslo_concurrency.lockutils [None req-d4630386-d9cd-4d80-938d-dfbe2bb77927 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "bb076a4e-eb38-4d0c-bdea-f8ebb46d7968" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.387s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.881826] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "64a25430-b7bb-474a-aee2-a7cb801f335c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.882130] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.384663] env[62585]: DEBUG nova.compute.manager [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Starting instance... 
{{(pid=62585) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1144.906048] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.906048] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.907933] env[62585]: INFO nova.compute.claims [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1145.971251] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e132e5ed-f314-467b-acd0-bbc5d5308d18 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.978905] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6a21a3-9cdd-489c-8332-eeb9ffef9d93 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.009200] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e098ee-905a-4599-af87-04b1bb1cb3ee {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.016325] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e3f21f-b9a4-4f6a-828e-77ea310e3304 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.028841] env[62585]: DEBUG nova.compute.provider_tree [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1146.531755] env[62585]: DEBUG nova.scheduler.client.report [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1147.036823] env[62585]: DEBUG oslo_concurrency.lockutils [None 
req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.131s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.037392] env[62585]: DEBUG nova.compute.manager [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Start building networks asynchronously for instance. {{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1147.542703] env[62585]: DEBUG nova.compute.utils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1147.544154] env[62585]: DEBUG nova.compute.manager [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Allocating IP information in the background. {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1147.544321] env[62585]: DEBUG nova.network.neutron [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] allocate_for_instance() {{(pid=62585) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1147.591753] env[62585]: DEBUG nova.policy [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1307e393a3fd4cf7b4b1a24571f07c64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c32e1b446add43fe92f7db2dd2373f6c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62585) authorize /opt/stack/nova/nova/policy.py:201}} [ 1147.849794] env[62585]: DEBUG nova.network.neutron [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Successfully created port: 5d5de77a-cccd-423b-8d1c-7b96f243f6e9 {{(pid=62585) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1148.050635] env[62585]: DEBUG nova.compute.manager [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Start building block device mappings for instance. 
{{(pid=62585) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1148.935707] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.935940] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.058554] env[62585]: DEBUG nova.compute.manager [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Start spawning the instance on the hypervisor. {{(pid=62585) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1149.083876] env[62585]: DEBUG nova.virt.hardware [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T09:57:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T09:57:26Z,direct_url=,disk_format='vmdk',id=790c072e-fdf9-43ec-b7a5-3b21a2eaee40,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4bd851f044bf4bc19a020fb40c46740a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T09:57:27Z,virtual_size=,visibility=), allow threads: False {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1149.084139] env[62585]: DEBUG nova.virt.hardware [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Flavor limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1149.084310] env[62585]: DEBUG nova.virt.hardware [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Image limits 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1149.084491] env[62585]: DEBUG nova.virt.hardware [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Flavor pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1149.084672] env[62585]: DEBUG nova.virt.hardware [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] 
Image pref 0:0:0 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1149.084825] env[62585]: DEBUG nova.virt.hardware [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62585) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1149.085047] env[62585]: DEBUG nova.virt.hardware [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1149.085219] env[62585]: DEBUG nova.virt.hardware [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1149.085391] env[62585]: DEBUG nova.virt.hardware [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Got 1 possible topologies {{(pid=62585) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1149.085555] env[62585]: DEBUG nova.virt.hardware [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1149.085730] env[62585]: DEBUG nova.virt.hardware [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62585) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1149.086606] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a3fc92-9113-444e-b360-9e1d8bb95969 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.095643] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c437fde4-f80b-4953-97d6-ed37a749f9fe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.208567] env[62585]: DEBUG nova.compute.manager [req-634864c0-6a84-4af4-97af-cb2f66aa085a req-d03b9ffa-f64e-469c-9188-f0b6982c0ca0 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Received event network-vif-plugged-5d5de77a-cccd-423b-8d1c-7b96f243f6e9 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1149.208801] env[62585]: DEBUG oslo_concurrency.lockutils [req-634864c0-6a84-4af4-97af-cb2f66aa085a req-d03b9ffa-f64e-469c-9188-f0b6982c0ca0 service nova] Acquiring lock "64a25430-b7bb-474a-aee2-a7cb801f335c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1149.209025] env[62585]: 
DEBUG oslo_concurrency.lockutils [req-634864c0-6a84-4af4-97af-cb2f66aa085a req-d03b9ffa-f64e-469c-9188-f0b6982c0ca0 service nova] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.209203] env[62585]: DEBUG oslo_concurrency.lockutils [req-634864c0-6a84-4af4-97af-cb2f66aa085a req-d03b9ffa-f64e-469c-9188-f0b6982c0ca0 service nova] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.209375] env[62585]: DEBUG nova.compute.manager [req-634864c0-6a84-4af4-97af-cb2f66aa085a req-d03b9ffa-f64e-469c-9188-f0b6982c0ca0 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] No waiting events found dispatching network-vif-plugged-5d5de77a-cccd-423b-8d1c-7b96f243f6e9 {{(pid=62585) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1149.209546] env[62585]: WARNING nova.compute.manager [req-634864c0-6a84-4af4-97af-cb2f66aa085a req-d03b9ffa-f64e-469c-9188-f0b6982c0ca0 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Received unexpected event network-vif-plugged-5d5de77a-cccd-423b-8d1c-7b96f243f6e9 for instance with vm_state building and task_state spawning. [ 1149.288754] env[62585]: DEBUG nova.network.neutron [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Successfully updated port: 5d5de77a-cccd-423b-8d1c-7b96f243f6e9 {{(pid=62585) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1149.438906] env[62585]: INFO nova.compute.manager [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Detaching volume 0049076e-e184-464d-bb32-a8231468072e [ 1149.472733] env[62585]: INFO nova.virt.block_device [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Attempting to driver detach volume 0049076e-e184-464d-bb32-a8231468072e from mountpoint /dev/sdb [ 1149.472973] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Volume detach. 
Driver type: vmdk {{(pid=62585) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1149.473176] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294086', 'volume_id': '0049076e-e184-464d-bb32-a8231468072e', 'name': 'volume-0049076e-e184-464d-bb32-a8231468072e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fa4c721c-3455-48c9-bbca-4b4dac29aff6', 'attached_at': '', 'detached_at': '', 'volume_id': '0049076e-e184-464d-bb32-a8231468072e', 'serial': '0049076e-e184-464d-bb32-a8231468072e'} {{(pid=62585) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1149.474051] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2bd29b-272f-4942-ad11-f5213b240d6c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.494890] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0318f3-e7a3-42c2-918c-17b2f5652909 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.501282] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d32f92-5c30-40d6-8b62-1ef88f0290ff {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.520278] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2b2340-9b6c-493d-a2c3-6a0458b3ad20 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.533813] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] The volume has not been displaced from its original location: [datastore2] volume-0049076e-e184-464d-bb32-a8231468072e/volume-0049076e-e184-464d-bb32-a8231468072e.vmdk. No consolidation needed. 
{{(pid=62585) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1149.539009] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Reconfiguring VM instance instance-0000006a to detach disk 2001 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1149.539415] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73c1fd55-e968-40d7-bc1d-6295715c0f63 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.556337] env[62585]: DEBUG oslo_vmware.api [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1149.556337] env[62585]: value = "task-1385224" [ 1149.556337] env[62585]: _type = "Task" [ 1149.556337] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.563620] env[62585]: DEBUG oslo_vmware.api [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385224, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.791238] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "refresh_cache-64a25430-b7bb-474a-aee2-a7cb801f335c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1149.791388] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquired lock "refresh_cache-64a25430-b7bb-474a-aee2-a7cb801f335c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.791546] env[62585]: DEBUG nova.network.neutron [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Building network info cache for instance {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1150.067553] env[62585]: DEBUG oslo_vmware.api [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385224, 'name': ReconfigVM_Task, 'duration_secs': 0.227457} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.067906] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Reconfigured VM instance instance-0000006a to detach disk 2001 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1150.072203] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f9a53f9-8d3f-4850-9307-5af7eda2999d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.086206] env[62585]: DEBUG oslo_vmware.api [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1150.086206] env[62585]: value = "task-1385225" [ 1150.086206] env[62585]: _type = "Task" [ 1150.086206] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.093568] env[62585]: DEBUG oslo_vmware.api [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385225, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.322482] env[62585]: DEBUG nova.network.neutron [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Instance cache missing network info. 
{{(pid=62585) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1150.441803] env[62585]: DEBUG nova.network.neutron [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Updating instance_info_cache with network_info: [{"id": "5d5de77a-cccd-423b-8d1c-7b96f243f6e9", "address": "fa:16:3e:ad:9b:0e", "network": {"id": "66030331-b20b-4f58-ac7c-9dbd68ceaf6a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-363131979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c32e1b446add43fe92f7db2dd2373f6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5de77a-cc", "ovs_interfaceid": "5d5de77a-cccd-423b-8d1c-7b96f243f6e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.595899] env[62585]: DEBUG oslo_vmware.api [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385225, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.944432] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Releasing lock "refresh_cache-64a25430-b7bb-474a-aee2-a7cb801f335c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1150.944776] env[62585]: DEBUG nova.compute.manager [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Instance network_info: |[{"id": "5d5de77a-cccd-423b-8d1c-7b96f243f6e9", "address": "fa:16:3e:ad:9b:0e", "network": {"id": "66030331-b20b-4f58-ac7c-9dbd68ceaf6a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-363131979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c32e1b446add43fe92f7db2dd2373f6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5de77a-cc", "ovs_interfaceid": "5d5de77a-cccd-423b-8d1c-7b96f243f6e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62585) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1150.945240] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:9b:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5fb99c57-eaa0-447b-bb33-baced85d9c00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d5de77a-cccd-423b-8d1c-7b96f243f6e9', 'vif_model': 'vmxnet3'}] {{(pid=62585) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1150.952554] env[62585]: DEBUG oslo.service.loopingcall [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1150.952772] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Creating VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1150.952994] env[62585]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4838cf54-1d6c-43e5-9673-3377cd740ea2 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.972041] env[62585]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1150.972041] env[62585]: value = "task-1385226" [ 1150.972041] env[62585]: _type = "Task" [ 1150.972041] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.979159] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385226, 'name': CreateVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.096680] env[62585]: DEBUG oslo_vmware.api [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385225, 'name': ReconfigVM_Task, 'duration_secs': 0.760093} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.097082] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294086', 'volume_id': '0049076e-e184-464d-bb32-a8231468072e', 'name': 'volume-0049076e-e184-464d-bb32-a8231468072e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fa4c721c-3455-48c9-bbca-4b4dac29aff6', 'attached_at': '', 'detached_at': '', 'volume_id': '0049076e-e184-464d-bb32-a8231468072e', 'serial': '0049076e-e184-464d-bb32-a8231468072e'} {{(pid=62585) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1151.233343] env[62585]: DEBUG nova.compute.manager [req-15461f4f-69e4-475c-9178-4067f3d32f5c req-9d2c73bb-524f-4d2d-9aee-96986f0b2d70 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Received event network-changed-5d5de77a-cccd-423b-8d1c-7b96f243f6e9 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1151.233566] env[62585]: DEBUG nova.compute.manager [req-15461f4f-69e4-475c-9178-4067f3d32f5c req-9d2c73bb-524f-4d2d-9aee-96986f0b2d70 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Refreshing instance network info cache due to event network-changed-5d5de77a-cccd-423b-8d1c-7b96f243f6e9. 
{{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1151.233810] env[62585]: DEBUG oslo_concurrency.lockutils [req-15461f4f-69e4-475c-9178-4067f3d32f5c req-9d2c73bb-524f-4d2d-9aee-96986f0b2d70 service nova] Acquiring lock "refresh_cache-64a25430-b7bb-474a-aee2-a7cb801f335c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1151.233946] env[62585]: DEBUG oslo_concurrency.lockutils [req-15461f4f-69e4-475c-9178-4067f3d32f5c req-9d2c73bb-524f-4d2d-9aee-96986f0b2d70 service nova] Acquired lock "refresh_cache-64a25430-b7bb-474a-aee2-a7cb801f335c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.234121] env[62585]: DEBUG nova.network.neutron [req-15461f4f-69e4-475c-9178-4067f3d32f5c req-9d2c73bb-524f-4d2d-9aee-96986f0b2d70 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Refreshing network info cache for port 5d5de77a-cccd-423b-8d1c-7b96f243f6e9 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1151.482399] env[62585]: DEBUG oslo_vmware.api [-] Task: {'id': task-1385226, 'name': CreateVM_Task, 'duration_secs': 0.28424} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.482623] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Created VM on the ESX host {{(pid=62585) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1151.483270] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1151.483444] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.483770] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1151.484029] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d7da6a7-348d-4d4d-a4f2-c42c6bdbc697 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.488234] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1151.488234] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5230b1e5-ff27-c4e4-94a4-47be4f78aacd" [ 1151.488234] env[62585]: _type = "Task" [ 1151.488234] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.495677] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5230b1e5-ff27-c4e4-94a4-47be4f78aacd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.640062] env[62585]: DEBUG nova.objects.instance [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lazy-loading 'flavor' on Instance uuid fa4c721c-3455-48c9-bbca-4b4dac29aff6 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1151.932552] env[62585]: DEBUG nova.network.neutron [req-15461f4f-69e4-475c-9178-4067f3d32f5c req-9d2c73bb-524f-4d2d-9aee-96986f0b2d70 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Updated VIF entry in instance network info cache for port 5d5de77a-cccd-423b-8d1c-7b96f243f6e9. {{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1151.936265] env[62585]: DEBUG nova.network.neutron [req-15461f4f-69e4-475c-9178-4067f3d32f5c req-9d2c73bb-524f-4d2d-9aee-96986f0b2d70 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Updating instance_info_cache with network_info: [{"id": "5d5de77a-cccd-423b-8d1c-7b96f243f6e9", "address": "fa:16:3e:ad:9b:0e", "network": {"id": "66030331-b20b-4f58-ac7c-9dbd68ceaf6a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-363131979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c32e1b446add43fe92f7db2dd2373f6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5de77a-cc", "ovs_interfaceid": "5d5de77a-cccd-423b-8d1c-7b96f243f6e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.998908] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5230b1e5-ff27-c4e4-94a4-47be4f78aacd, 'name': SearchDatastore_Task, 'duration_secs': 0.010301} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.999230] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1151.999467] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Processing image 790c072e-fdf9-43ec-b7a5-3b21a2eaee40 {{(pid=62585) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1151.999705] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1151.999857] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.000048] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1152.000304] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-606d2318-c33b-42dd-b5e9-f78971587657 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.007863] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62585) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1152.008053] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62585) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1152.008707] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bf6d082-82a0-49a7-94b0-645c8b13e3f1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.013347] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1152.013347] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52d810c4-dde1-66f1-57dd-1847a805896c" [ 1152.013347] env[62585]: _type = "Task" [ 1152.013347] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.020342] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52d810c4-dde1-66f1-57dd-1847a805896c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.444873] env[62585]: DEBUG oslo_concurrency.lockutils [req-15461f4f-69e4-475c-9178-4067f3d32f5c req-9d2c73bb-524f-4d2d-9aee-96986f0b2d70 service nova] Releasing lock "refresh_cache-64a25430-b7bb-474a-aee2-a7cb801f335c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1152.524729] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]52d810c4-dde1-66f1-57dd-1847a805896c, 'name': SearchDatastore_Task, 'duration_secs': 0.008161} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.525463] env[62585]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32e38a45-678a-45fb-9c02-ca81725fe905 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.530153] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1152.530153] env[62585]: value = "session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5260d43f-a415-5072-bd06-d5b1323baa9b" [ 1152.530153] env[62585]: _type = "Task" [ 1152.530153] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.537234] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5260d43f-a415-5072-bd06-d5b1323baa9b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.648811] env[62585]: DEBUG oslo_concurrency.lockutils [None req-a5a6fd59-4c69-4b71-ad96-8ce5ef882f49 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.712s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.040108] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': session[52af1c29-936d-ff9c-2a72-70b9fabc8261]5260d43f-a415-5072-bd06-d5b1323baa9b, 'name': SearchDatastore_Task, 'duration_secs': 0.008822} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.040385] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.040638] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 64a25430-b7bb-474a-aee2-a7cb801f335c/64a25430-b7bb-474a-aee2-a7cb801f335c.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1153.040891] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-091b5395-05ab-4492-9b7e-dbb2767ccd3b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.048523] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1153.048523] env[62585]: value = "task-1385227" [ 1153.048523] env[62585]: _type = "Task" [ 1153.048523] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.056446] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385227, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.225752] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.226066] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.226295] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.226490] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.226672] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.228928] env[62585]: INFO nova.compute.manager [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Terminating instance [ 1153.230812] env[62585]: DEBUG nova.compute.manager [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1153.231015] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1153.231848] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0300119e-d030-4077-a03b-9bd9ada20109 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.239601] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1153.239816] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48cee5d4-1073-4f3a-bff7-a4eff1fe92a4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.246364] env[62585]: DEBUG oslo_vmware.api [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1153.246364] env[62585]: value = "task-1385228" [ 1153.246364] env[62585]: _type = "Task" [ 1153.246364] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.254855] env[62585]: DEBUG oslo_vmware.api [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385228, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.558954] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385227, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457258} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.559337] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/790c072e-fdf9-43ec-b7a5-3b21a2eaee40/790c072e-fdf9-43ec-b7a5-3b21a2eaee40.vmdk to [datastore1] 64a25430-b7bb-474a-aee2-a7cb801f335c/64a25430-b7bb-474a-aee2-a7cb801f335c.vmdk {{(pid=62585) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1153.559409] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Extending root virtual disk to 1048576 {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1153.559636] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7fa0b0e-342f-41b2-9cb0-7d34d4482101 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.566104] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1153.566104] env[62585]: value = "task-1385229" [ 1153.566104] env[62585]: _type = "Task" [ 1153.566104] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.574099] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385229, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.756333] env[62585]: DEBUG oslo_vmware.api [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385228, 'name': PowerOffVM_Task, 'duration_secs': 0.235523} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.756594] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1153.756768] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1153.757011] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-625d21ae-c2a5-4313-b250-3943b0514ab1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.816209] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1153.816384] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Deleting contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1153.816606] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Deleting the datastore file [datastore2] fa4c721c-3455-48c9-bbca-4b4dac29aff6 {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1153.816844] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-18f95af8-4b54-49e6-8fdc-308dbb57fa64 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.822680] env[62585]: DEBUG oslo_vmware.api [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1153.822680] env[62585]: value = "task-1385231" [ 1153.822680] env[62585]: _type = "Task" [ 1153.822680] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.829673] env[62585]: DEBUG oslo_vmware.api [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385231, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.076202] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385229, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061271} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.076553] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Extended root virtual disk {{(pid=62585) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1154.077354] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e20b53-ebe7-4695-a8fe-7c8a573bc6bd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.098630] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 64a25430-b7bb-474a-aee2-a7cb801f335c/64a25430-b7bb-474a-aee2-a7cb801f335c.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1154.098887] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d568bdce-b5f1-477e-bf17-208b9028091c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.118501] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1154.118501] env[62585]: value = "task-1385232" [ 1154.118501] env[62585]: _type = "Task" [ 1154.118501] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.125716] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385232, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.332999] env[62585]: DEBUG oslo_vmware.api [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385231, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156155} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.333232] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1154.333436] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Deleted contents of the VM from datastore datastore2 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1154.333636] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1154.333845] env[62585]: INFO nova.compute.manager [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1154.334104] env[62585]: DEBUG oslo.service.loopingcall [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1154.334315] env[62585]: DEBUG nova.compute.manager [-] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1154.334409] env[62585]: DEBUG nova.network.neutron [-] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1154.628741] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385232, 'name': ReconfigVM_Task, 'duration_secs': 0.284983} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.629081] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 64a25430-b7bb-474a-aee2-a7cb801f335c/64a25430-b7bb-474a-aee2-a7cb801f335c.vmdk or device None with type sparse {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1154.629710] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c27849a-480b-4540-904c-3f2357fa6dca {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.636495] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1154.636495] env[62585]: value = "task-1385233" [ 1154.636495] env[62585]: _type = "Task" [ 1154.636495] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.643751] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385233, 'name': Rename_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.774653] env[62585]: DEBUG nova.compute.manager [req-01dce484-55cd-4548-a9d7-f24d4e280505 req-3a6885ea-26bb-446b-bfc3-b6eaf6b45e9e service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Received event network-vif-deleted-2f56c908-7d1b-4833-88be-c915dc3e5385 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1154.774761] env[62585]: INFO nova.compute.manager [req-01dce484-55cd-4548-a9d7-f24d4e280505 req-3a6885ea-26bb-446b-bfc3-b6eaf6b45e9e service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Neutron deleted interface 2f56c908-7d1b-4833-88be-c915dc3e5385; detaching it from the instance and deleting it from the info cache [ 1154.774946] env[62585]: DEBUG nova.network.neutron [req-01dce484-55cd-4548-a9d7-f24d4e280505 req-3a6885ea-26bb-446b-bfc3-b6eaf6b45e9e service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.145866] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385233, 'name': Rename_Task, 'duration_secs': 0.138366} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.146163] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Powering on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1155.146426] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ae42d4d-8add-4aab-803c-1e1c5847bb96 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.152290] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1155.152290] env[62585]: value = "task-1385234" [ 1155.152290] env[62585]: _type = "Task" [ 1155.152290] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.159299] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385234, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.252880] env[62585]: DEBUG nova.network.neutron [-] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.278061] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a2de5dba-1cff-46f9-881b-4d4484960afe {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.287829] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcdaf027-ac26-4e9e-808b-5ebd97c34f5b {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.313438] env[62585]: DEBUG nova.compute.manager [req-01dce484-55cd-4548-a9d7-f24d4e280505 req-3a6885ea-26bb-446b-bfc3-b6eaf6b45e9e service nova] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Detach interface failed, port_id=2f56c908-7d1b-4833-88be-c915dc3e5385, reason: Instance fa4c721c-3455-48c9-bbca-4b4dac29aff6 could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1155.662062] env[62585]: DEBUG oslo_vmware.api [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385234, 'name': PowerOnVM_Task, 'duration_secs': 0.471485} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.662380] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Powered on the VM {{(pid=62585) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1155.662554] env[62585]: INFO nova.compute.manager [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Took 6.60 seconds to spawn the instance on the hypervisor. [ 1155.662746] env[62585]: DEBUG nova.compute.manager [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Checking state {{(pid=62585) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1155.663525] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49345b2-255c-48a2-9e97-936d6091e6e0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.756280] env[62585]: INFO nova.compute.manager [-] [instance: fa4c721c-3455-48c9-bbca-4b4dac29aff6] Took 1.42 seconds to deallocate network for instance. [ 1156.180975] env[62585]: INFO nova.compute.manager [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Took 11.29 seconds to build instance. 
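The repeated "Waiting for the task: (returnval){ ... } to complete", "progress is N%.", and "completed successfully ... duration_secs" records above come from a poll-until-done loop around vCenter tasks (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task). The sketch below is a minimal, hypothetical illustration of that polling pattern only; it is not the actual oslo.vmware wait_for_task/_poll_task implementation, and the TaskInfo structure and the poll callable are invented for illustration.

import time
from dataclasses import dataclass
from typing import Callable, Optional

@dataclass
class TaskInfo:
    # Hypothetical stand-in for the vCenter task objects named in the log
    # (task-1385226, task-1385227, ...); not an oslo.vmware type.
    task_id: str
    state: str              # "running", "success", or "error"
    progress: int           # 0-100, as in the "progress is N%." records
    error: Optional[str] = None

def wait_for_task(poll: Callable[[], TaskInfo],
                  interval: float = 0.5,
                  timeout: float = 300.0) -> TaskInfo:
    # Poll until the task reaches a terminal state, mirroring the
    # wait_for_task / _poll_task records in the log above.
    start = time.monotonic()
    while True:
        info = poll()
        if info.state == "success":
            # Corresponds to "... completed successfully" plus duration_secs.
            print(f"Task {info.task_id} completed in {time.monotonic() - start:.6f}s")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {info.task_id} failed: {info.error}")
        # Corresponds to the intermediate "... progress is N%." records.
        print(f"Task {info.task_id} progress is {info.progress}%")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {info.task_id} did not complete within {timeout}s")
        time.sleep(interval)

if __name__ == "__main__":
    # Toy poll callable that finishes after a few polls; a real caller would
    # query the task state from the hypervisor instead.
    states = iter([TaskInfo("task-0000000", "running", 0),
                   TaskInfo("task-0000000", "running", 5),
                   TaskInfo("task-0000000", "success", 100)])
    wait_for_task(lambda: next(states), interval=0.01)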
[ 1156.263357] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1156.263659] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1156.263892] env[62585]: DEBUG nova.objects.instance [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lazy-loading 'resources' on Instance uuid fa4c721c-3455-48c9-bbca-4b4dac29aff6 {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1156.683389] env[62585]: DEBUG oslo_concurrency.lockutils [None req-e550924a-e4ee-43a6-b544-af846ae790a5 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.801s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.826410] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a084a1-28eb-496c-ae4f-07834f547e30 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.834147] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1967676-d4bb-416d-99a8-1535ca7c58cd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.863386] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde67c34-fe42-4453-9024-bb17a4546883 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.871080] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e49b09b-04e1-4d0d-a875-2983d048e207 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.884203] env[62585]: DEBUG nova.compute.provider_tree [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1156.887317] env[62585]: DEBUG nova.compute.manager [req-ea78693a-9062-49be-b7f6-df391953c84b req-257fe693-3491-4394-94ca-44a2e9758cc7 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Received event network-changed-5d5de77a-cccd-423b-8d1c-7b96f243f6e9 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 
1156.887506] env[62585]: DEBUG nova.compute.manager [req-ea78693a-9062-49be-b7f6-df391953c84b req-257fe693-3491-4394-94ca-44a2e9758cc7 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Refreshing instance network info cache due to event network-changed-5d5de77a-cccd-423b-8d1c-7b96f243f6e9. {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1156.887718] env[62585]: DEBUG oslo_concurrency.lockutils [req-ea78693a-9062-49be-b7f6-df391953c84b req-257fe693-3491-4394-94ca-44a2e9758cc7 service nova] Acquiring lock "refresh_cache-64a25430-b7bb-474a-aee2-a7cb801f335c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1156.887864] env[62585]: DEBUG oslo_concurrency.lockutils [req-ea78693a-9062-49be-b7f6-df391953c84b req-257fe693-3491-4394-94ca-44a2e9758cc7 service nova] Acquired lock "refresh_cache-64a25430-b7bb-474a-aee2-a7cb801f335c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.888039] env[62585]: DEBUG nova.network.neutron [req-ea78693a-9062-49be-b7f6-df391953c84b req-257fe693-3491-4394-94ca-44a2e9758cc7 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Refreshing network info cache for port 5d5de77a-cccd-423b-8d1c-7b96f243f6e9 {{(pid=62585) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1157.391129] env[62585]: DEBUG nova.scheduler.client.report [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1157.593153] env[62585]: DEBUG nova.network.neutron [req-ea78693a-9062-49be-b7f6-df391953c84b req-257fe693-3491-4394-94ca-44a2e9758cc7 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Updated VIF entry in instance network info cache for port 5d5de77a-cccd-423b-8d1c-7b96f243f6e9. 
{{(pid=62585) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1157.593552] env[62585]: DEBUG nova.network.neutron [req-ea78693a-9062-49be-b7f6-df391953c84b req-257fe693-3491-4394-94ca-44a2e9758cc7 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Updating instance_info_cache with network_info: [{"id": "5d5de77a-cccd-423b-8d1c-7b96f243f6e9", "address": "fa:16:3e:ad:9b:0e", "network": {"id": "66030331-b20b-4f58-ac7c-9dbd68ceaf6a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-363131979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c32e1b446add43fe92f7db2dd2373f6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5de77a-cc", "ovs_interfaceid": "5d5de77a-cccd-423b-8d1c-7b96f243f6e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.897268] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.633s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1157.916820] env[62585]: INFO nova.scheduler.client.report [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Deleted allocations for instance fa4c721c-3455-48c9-bbca-4b4dac29aff6 [ 1158.096360] env[62585]: DEBUG oslo_concurrency.lockutils [req-ea78693a-9062-49be-b7f6-df391953c84b req-257fe693-3491-4394-94ca-44a2e9758cc7 service nova] Releasing lock "refresh_cache-64a25430-b7bb-474a-aee2-a7cb801f335c" {{(pid=62585) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1158.424745] env[62585]: DEBUG oslo_concurrency.lockutils [None req-6401de92-c1f8-4dfe-86b4-a94930102781 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "fa4c721c-3455-48c9-bbca-4b4dac29aff6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.199s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1163.377031] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1163.377031] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.377031] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "c31a584a-3dfb-4ec2-8852-e9e27cafcb2d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1163.377031] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "c31a584a-3dfb-4ec2-8852-e9e27cafcb2d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.377031] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "c31a584a-3dfb-4ec2-8852-e9e27cafcb2d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1163.378707] env[62585]: INFO nova.compute.manager [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Terminating instance [ 1163.384115] env[62585]: DEBUG nova.compute.manager [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Start destroying the instance on the hypervisor. 
{{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1163.384115] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1163.384115] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a3f098-0298-4c9d-9f65-c0de06aa29a8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.391140] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1163.391673] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa8c0968-a53b-4c44-99e5-162ab6394d0d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.401017] env[62585]: DEBUG oslo_vmware.api [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1163.401017] env[62585]: value = "task-1385236" [ 1163.401017] env[62585]: _type = "Task" [ 1163.401017] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.409852] env[62585]: DEBUG oslo_vmware.api [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385236, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.912535] env[62585]: DEBUG oslo_vmware.api [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385236, 'name': PowerOffVM_Task, 'duration_secs': 0.201271} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.913471] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1163.913471] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1163.913592] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84ee1b59-9170-4d3a-a73b-00d4ef5c78e5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.972277] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1163.972554] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1163.972710] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Deleting the datastore file [datastore1] c31a584a-3dfb-4ec2-8852-e9e27cafcb2d {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1163.972980] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5754b5fc-54c5-4172-b604-d3e94d29b3c4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.978424] env[62585]: DEBUG oslo_vmware.api [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1163.978424] env[62585]: value = "task-1385238" [ 1163.978424] env[62585]: _type = "Task" [ 1163.978424] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.985940] env[62585]: DEBUG oslo_vmware.api [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385238, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.487922] env[62585]: DEBUG oslo_vmware.api [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385238, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17329} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.488274] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1164.488407] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1164.488555] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1164.488733] env[62585]: INFO nova.compute.manager [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1164.488973] env[62585]: DEBUG oslo.service.loopingcall [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1164.489183] env[62585]: DEBUG nova.compute.manager [-] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1164.489277] env[62585]: DEBUG nova.network.neutron [-] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1164.808833] env[62585]: DEBUG nova.compute.manager [req-400137ce-97af-4d85-9648-a7cff56ab580 req-33ac8164-3027-4f54-bd03-47d33ee96596 service nova] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Received event network-vif-deleted-ce4064ff-0c6e-4bbd-83dd-132713bb289f {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1164.809054] env[62585]: INFO nova.compute.manager [req-400137ce-97af-4d85-9648-a7cff56ab580 req-33ac8164-3027-4f54-bd03-47d33ee96596 service nova] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Neutron deleted interface ce4064ff-0c6e-4bbd-83dd-132713bb289f; detaching it from the instance and deleting it from the info cache [ 1164.809231] env[62585]: DEBUG nova.network.neutron [req-400137ce-97af-4d85-9648-a7cff56ab580 req-33ac8164-3027-4f54-bd03-47d33ee96596 service nova] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.289018] env[62585]: DEBUG nova.network.neutron [-] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.312039] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-42b37780-f202-4aa0-b184-b209f4509d14 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.321091] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccd0851-495f-4585-9d76-01a11b31da27 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.343591] env[62585]: DEBUG nova.compute.manager [req-400137ce-97af-4d85-9648-a7cff56ab580 req-33ac8164-3027-4f54-bd03-47d33ee96596 service nova] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Detach interface failed, port_id=ce4064ff-0c6e-4bbd-83dd-132713bb289f, reason: Instance c31a584a-3dfb-4ec2-8852-e9e27cafcb2d could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1165.793332] env[62585]: INFO nova.compute.manager [-] [instance: c31a584a-3dfb-4ec2-8852-e9e27cafcb2d] Took 1.30 seconds to deallocate network for instance. 
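The teardown of instance c31a584a-3dfb-4ec2-8852-e9e27cafcb2d traced above follows a fixed order: power off the VM, unregister it, then delete its files from the datastore before the network is deallocated. A rough sketch of that sequence against the raw vSphere API, reusing the invoke/wait pattern from the earlier sketch; `session`, `vm_ref`, `dc_ref` (datacenter reference) and the datastore path string are all assumed inputs, and this is a simplified illustration rather than Nova's actual vmops implementation:

```python
def destroy_vm(session, vm_ref, dc_ref, ds_path):
    # 1. Power off the VM (asynchronous task, so wait for it).
    session.wait_for_task(
        session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref))
    # 2. Unregister the VM. This call is synchronous, which matches the single
    #    "Invoking VirtualMachine.UnregisterVM" entry with no progress polling.
    session.invoke_api(session.vim, "UnregisterVM", vm_ref)
    # 3. Delete the VM's directory, e.g. a path like "[datastore1] <uuid>".
    file_manager = session.vim.service_content.fileManager
    session.wait_for_task(
        session.invoke_api(session.vim, "DeleteDatastoreFile_Task",
                           file_manager, name=ds_path, datacenter=dc_ref))
```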
[ 1166.300864] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.300864] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.300864] env[62585]: DEBUG nova.objects.instance [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lazy-loading 'resources' on Instance uuid c31a584a-3dfb-4ec2-8852-e9e27cafcb2d {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.862014] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0074e267-36ed-4aa5-bf9e-03e6a246118c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.870430] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d38102-b7b9-4a7b-b77d-38937c065c95 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.898922] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2327b155-42c8-4a7e-beeb-b43384b222bc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.905794] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c44a9c-e986-4729-93cb-2487613f7b4c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.919967] env[62585]: DEBUG nova.compute.provider_tree [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.422813] env[62585]: DEBUG nova.scheduler.client.report [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1167.928382] env[62585]: DEBUG oslo_concurrency.lockutils [None 
req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.628s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.957555] env[62585]: INFO nova.scheduler.client.report [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Deleted allocations for instance c31a584a-3dfb-4ec2-8852-e9e27cafcb2d [ 1168.469885] env[62585]: DEBUG oslo_concurrency.lockutils [None req-b2817d26-268f-49d0-ab72-0a38f83074c3 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "c31a584a-3dfb-4ec2-8852-e9e27cafcb2d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.095s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.976876] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "d8955c26-85d1-481c-b1d2-4879bb52158b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.977256] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "d8955c26-85d1-481c-b1d2-4879bb52158b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.977404] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "d8955c26-85d1-481c-b1d2-4879bb52158b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.977591] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "d8955c26-85d1-481c-b1d2-4879bb52158b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.977766] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "d8955c26-85d1-481c-b1d2-4879bb52158b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.979867] env[62585]: INFO nova.compute.manager 
[None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Terminating instance [ 1168.981639] env[62585]: DEBUG nova.compute.manager [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1168.981835] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1168.982699] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c509ff49-fa00-4257-b088-68264d6fa503 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.990607] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1168.990840] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19ed17f4-c627-43b8-b0c8-7e832636e218 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.997607] env[62585]: DEBUG oslo_vmware.api [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1168.997607] env[62585]: value = "task-1385239" [ 1168.997607] env[62585]: _type = "Task" [ 1168.997607] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.005545] env[62585]: DEBUG oslo_vmware.api [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385239, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.507774] env[62585]: DEBUG oslo_vmware.api [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385239, 'name': PowerOffVM_Task, 'duration_secs': 0.181814} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.508056] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1169.508248] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1169.508499] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-49105a60-934f-4ab9-b60a-3e242214f93a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.570516] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1169.570779] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1169.570977] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Deleting the datastore file [datastore1] d8955c26-85d1-481c-b1d2-4879bb52158b {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1169.571276] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c429855-2bf9-46b8-9a6b-45fd2bd367b9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.577960] env[62585]: DEBUG oslo_vmware.api [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for the task: (returnval){ [ 1169.577960] env[62585]: value = "task-1385241" [ 1169.577960] env[62585]: _type = "Task" [ 1169.577960] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.585486] env[62585]: DEBUG oslo_vmware.api [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385241, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.087657] env[62585]: DEBUG oslo_vmware.api [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Task: {'id': task-1385241, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129848} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.088034] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1170.088195] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1170.088406] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1170.088605] env[62585]: INFO nova.compute.manager [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1170.088859] env[62585]: DEBUG oslo.service.loopingcall [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1170.089064] env[62585]: DEBUG nova.compute.manager [-] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1170.089170] env[62585]: DEBUG nova.network.neutron [-] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1170.386479] env[62585]: DEBUG nova.compute.manager [req-e7a566e9-2762-4e13-9df3-65aa611affaa req-04605859-7534-4b6c-8c5a-6a752e70c7e9 service nova] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Received event network-vif-deleted-be0d6bab-1253-458c-b3cd-71ed0eb87c2c {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1170.386607] env[62585]: INFO nova.compute.manager [req-e7a566e9-2762-4e13-9df3-65aa611affaa req-04605859-7534-4b6c-8c5a-6a752e70c7e9 service nova] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Neutron deleted interface be0d6bab-1253-458c-b3cd-71ed0eb87c2c; detaching it from the instance and deleting it from the info cache [ 1170.386785] env[62585]: DEBUG nova.network.neutron [req-e7a566e9-2762-4e13-9df3-65aa611affaa req-04605859-7534-4b6c-8c5a-6a752e70c7e9 service nova] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.867281] env[62585]: DEBUG nova.network.neutron [-] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.891040] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8fd42ffc-e430-401c-849e-216e6d80231f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.900457] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f190fd6-eaba-4083-bf8e-64e2a2d4b13c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.922815] env[62585]: DEBUG nova.compute.manager [req-e7a566e9-2762-4e13-9df3-65aa611affaa req-04605859-7534-4b6c-8c5a-6a752e70c7e9 service nova] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Detach interface failed, port_id=be0d6bab-1253-458c-b3cd-71ed0eb87c2c, reason: Instance d8955c26-85d1-481c-b1d2-4879bb52158b could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1171.370363] env[62585]: INFO nova.compute.manager [-] [instance: d8955c26-85d1-481c-b1d2-4879bb52158b] Took 1.28 seconds to deallocate network for instance. 
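The "compute_resources" lock lines bracketing the resource-tracker updates above (and again just below) come from oslo.concurrency's named-lock helpers, which log how long each caller waited for and then held the lock. A minimal sketch of the two spellings visible in this log, with `update_usage()` as a hypothetical stand-in for the protected work:

```python
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs only while the named lock is held; the synchronized wrapper emits
    # the 'acquired by ... waited N.NNNs' and 'released ... held N.NNNs'
    # DEBUG lines (the "inner" frames in the entries above).
    pass


def refresh_cache():
    # Explicit context-manager form, which logs the plainer
    # "Acquiring lock" / "Acquired lock" / "Releasing lock" entries seen for
    # the refresh_cache-* locks earlier in this log.
    with lockutils.lock('refresh_cache-example'):
        pass
```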
[ 1171.877043] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.877225] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.877455] env[62585]: DEBUG nova.objects.instance [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lazy-loading 'resources' on Instance uuid d8955c26-85d1-481c-b1d2-4879bb52158b {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1172.421387] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3224f398-20b5-4b4b-82b6-0da66cf4be2f {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.429403] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3438f812-a499-4475-a950-ba7e2c5a797a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.458793] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93858997-46d5-4565-9247-c1ea387c9b45 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.466009] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0ed676-dbed-4b92-a9c2-1264c0194985 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.479868] env[62585]: DEBUG nova.compute.provider_tree [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1172.983049] env[62585]: DEBUG nova.scheduler.client.report [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1173.487376] env[62585]: DEBUG oslo_concurrency.lockutils [None 
req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.610s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.505674] env[62585]: INFO nova.scheduler.client.report [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Deleted allocations for instance d8955c26-85d1-481c-b1d2-4879bb52158b [ 1174.013654] env[62585]: DEBUG oslo_concurrency.lockutils [None req-802f5423-fbe7-4ffb-acbe-5785e6266c23 tempest-ServerRescueNegativeTestJSON-377455709 tempest-ServerRescueNegativeTestJSON-377455709-project-member] Lock "d8955c26-85d1-481c-b1d2-4879bb52158b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.036s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.171303] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1193.171715] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1193.677254] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1193.677434] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Starting heal instance info cache {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1194.682783] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Didn't find any instances for network info cache update. 
{{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1194.683193] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.683235] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.683357] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.683504] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.683646] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.683795] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.683973] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62585) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1194.684152] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.847626] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "64a25430-b7bb-474a-aee2-a7cb801f335c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.847876] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.187482] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.187746] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.187880] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.188042] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62585) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1195.188964] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd8eed5-3a1b-49ee-9179-78a628a5ac91 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.198164] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b05580-c006-4bf6-8e28-331c296c8e69 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.211563] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e2344b-7ca3-46f8-8efd-a536c9e2d9f6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.217516] env[62585]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c83188-afaa-4dfd-be7a-fdb3a455e301 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.244727] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180776MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=62585) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1195.244854] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.245053] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.350953] env[62585]: DEBUG nova.compute.utils [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1195.853679] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1196.269743] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Instance 64a25430-b7bb-474a-aee2-a7cb801f335c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62585) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1196.269956] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1196.270121] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1196.294532] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5f5243-189a-403c-941c-76e7224882cc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.301946] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5237fe52-6627-4df7-8ed4-55947804d455 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.331677] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9502a335-7a16-4675-85c5-f999eff21cd6 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.338065] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3bf673d-adf5-44ce-92bf-91a18cdd124c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.350310] env[62585]: DEBUG nova.compute.provider_tree [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1196.853565] env[62585]: DEBUG nova.scheduler.client.report [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1196.911818] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "64a25430-b7bb-474a-aee2-a7cb801f335c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.912169] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" acquired by 
"nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.912299] env[62585]: INFO nova.compute.manager [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Attaching volume 431f3188-05ad-40af-bfb8-5ff4ef92774f to /dev/sdb [ 1196.942903] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336daadf-fcbc-4bf6-8bf2-2467d1a12adb {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.950123] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf9de59-d8d3-41c9-82bd-87ff2fa443fc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.962277] env[62585]: DEBUG nova.virt.block_device [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Updating existing volume attachment record: ec654a2e-a17a-4c0d-9bf5-0100af9af5f9 {{(pid=62585) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1197.358945] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62585) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1197.359185] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.114s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.504305] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Volume attach. 
Driver type: vmdk {{(pid=62585) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1201.504551] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294088', 'volume_id': '431f3188-05ad-40af-bfb8-5ff4ef92774f', 'name': 'volume-431f3188-05ad-40af-bfb8-5ff4ef92774f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '64a25430-b7bb-474a-aee2-a7cb801f335c', 'attached_at': '', 'detached_at': '', 'volume_id': '431f3188-05ad-40af-bfb8-5ff4ef92774f', 'serial': '431f3188-05ad-40af-bfb8-5ff4ef92774f'} {{(pid=62585) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1201.505419] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1bd2c0-d11f-4258-8298-cd9f70133a9a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.521516] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebcb99a-cbd3-4b1a-8486-f58d76e2bcdd {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.546422] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] volume-431f3188-05ad-40af-bfb8-5ff4ef92774f/volume-431f3188-05ad-40af-bfb8-5ff4ef92774f.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1201.546684] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1230b49c-726e-4b17-ae1d-225d5ee97ea4 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.564791] env[62585]: DEBUG oslo_vmware.api [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1201.564791] env[62585]: value = "task-1385244" [ 1201.564791] env[62585]: _type = "Task" [ 1201.564791] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.572546] env[62585]: DEBUG oslo_vmware.api [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385244, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.074110] env[62585]: DEBUG oslo_vmware.api [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385244, 'name': ReconfigVM_Task, 'duration_secs': 0.363472} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.074397] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Reconfigured VM instance instance-0000006b to attach disk [datastore1] volume-431f3188-05ad-40af-bfb8-5ff4ef92774f/volume-431f3188-05ad-40af-bfb8-5ff4ef92774f.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1202.078912] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cda348fd-5d9f-482a-8722-34aea92eece9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.093470] env[62585]: DEBUG oslo_vmware.api [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1202.093470] env[62585]: value = "task-1385245" [ 1202.093470] env[62585]: _type = "Task" [ 1202.093470] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.101243] env[62585]: DEBUG oslo_vmware.api [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385245, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.603207] env[62585]: DEBUG oslo_vmware.api [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385245, 'name': ReconfigVM_Task, 'duration_secs': 0.162302} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.603510] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294088', 'volume_id': '431f3188-05ad-40af-bfb8-5ff4ef92774f', 'name': 'volume-431f3188-05ad-40af-bfb8-5ff4ef92774f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '64a25430-b7bb-474a-aee2-a7cb801f335c', 'attached_at': '', 'detached_at': '', 'volume_id': '431f3188-05ad-40af-bfb8-5ff4ef92774f', 'serial': '431f3188-05ad-40af-bfb8-5ff4ef92774f'} {{(pid=62585) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1203.640113] env[62585]: DEBUG nova.objects.instance [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lazy-loading 'flavor' on Instance uuid 64a25430-b7bb-474a-aee2-a7cb801f335c {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1204.147224] env[62585]: DEBUG oslo_concurrency.lockutils [None req-0198b0b7-ae6b-40b0-b3fa-1a72d191f13c tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.235s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.947639] env[62585]: DEBUG oslo_concurrency.lockutils [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "64a25430-b7bb-474a-aee2-a7cb801f335c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.947893] env[62585]: DEBUG oslo_concurrency.lockutils [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.451052] env[62585]: DEBUG nova.compute.utils [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Using /dev/sd instead of None {{(pid=62585) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1205.954392] env[62585]: DEBUG oslo_concurrency.lockutils [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.006819] env[62585]: DEBUG oslo_concurrency.lockutils [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf 
tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "64a25430-b7bb-474a-aee2-a7cb801f335c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.007205] env[62585]: DEBUG oslo_concurrency.lockutils [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1207.007364] env[62585]: INFO nova.compute.manager [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Attaching volume 9d961fba-19dd-4b69-83f5-35d1264a25d9 to /dev/sdc [ 1207.037532] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479d9d98-beae-4dd8-ba68-ee7ff0088fad {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.044958] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d96696-cf93-45c7-b000-8dcdf355f046 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.058185] env[62585]: DEBUG nova.virt.block_device [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Updating existing volume attachment record: 1206cb5f-b44e-4621-bc63-7d94cf135d0e {{(pid=62585) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1211.600825] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Volume attach. 
Driver type: vmdk {{(pid=62585) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1211.601110] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294089', 'volume_id': '9d961fba-19dd-4b69-83f5-35d1264a25d9', 'name': 'volume-9d961fba-19dd-4b69-83f5-35d1264a25d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '64a25430-b7bb-474a-aee2-a7cb801f335c', 'attached_at': '', 'detached_at': '', 'volume_id': '9d961fba-19dd-4b69-83f5-35d1264a25d9', 'serial': '9d961fba-19dd-4b69-83f5-35d1264a25d9'} {{(pid=62585) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1211.602012] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1532e88b-300b-4e8e-97d1-91544e5eddaa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.618181] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99af9842-2c23-4c66-ba50-c5a1ffc66558 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.644363] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] volume-9d961fba-19dd-4b69-83f5-35d1264a25d9/volume-9d961fba-19dd-4b69-83f5-35d1264a25d9.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1211.644600] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-276fc254-c9f1-4bb0-a4c7-cc072479c594 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.661733] env[62585]: DEBUG oslo_vmware.api [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1211.661733] env[62585]: value = "task-1385248" [ 1211.661733] env[62585]: _type = "Task" [ 1211.661733] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.669092] env[62585]: DEBUG oslo_vmware.api [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385248, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.170841] env[62585]: DEBUG oslo_vmware.api [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385248, 'name': ReconfigVM_Task, 'duration_secs': 0.338677} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.171155] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Reconfigured VM instance instance-0000006b to attach disk [datastore1] volume-9d961fba-19dd-4b69-83f5-35d1264a25d9/volume-9d961fba-19dd-4b69-83f5-35d1264a25d9.vmdk or device None with type thin {{(pid=62585) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1212.175688] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a61c9ef7-df85-45ac-9ff7-3f9917468a63 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.190225] env[62585]: DEBUG oslo_vmware.api [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1212.190225] env[62585]: value = "task-1385249" [ 1212.190225] env[62585]: _type = "Task" [ 1212.190225] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.197728] env[62585]: DEBUG oslo_vmware.api [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385249, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.699116] env[62585]: DEBUG oslo_vmware.api [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385249, 'name': ReconfigVM_Task, 'duration_secs': 0.128914} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.699381] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294089', 'volume_id': '9d961fba-19dd-4b69-83f5-35d1264a25d9', 'name': 'volume-9d961fba-19dd-4b69-83f5-35d1264a25d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '64a25430-b7bb-474a-aee2-a7cb801f335c', 'attached_at': '', 'detached_at': '', 'volume_id': '9d961fba-19dd-4b69-83f5-35d1264a25d9', 'serial': '9d961fba-19dd-4b69-83f5-35d1264a25d9'} {{(pid=62585) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1213.733684] env[62585]: DEBUG nova.objects.instance [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lazy-loading 'flavor' on Instance uuid 64a25430-b7bb-474a-aee2-a7cb801f335c {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1214.239096] env[62585]: DEBUG oslo_concurrency.lockutils [None req-28818c14-34af-4a8c-b57f-cc7dc4d75ddf tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.232s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.519900] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "64a25430-b7bb-474a-aee2-a7cb801f335c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1214.520151] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.024030] env[62585]: INFO nova.compute.manager [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Detaching volume 431f3188-05ad-40af-bfb8-5ff4ef92774f [ 1215.053265] env[62585]: INFO nova.virt.block_device [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Attempting to driver detach volume 431f3188-05ad-40af-bfb8-5ff4ef92774f from mountpoint /dev/sdb [ 1215.053531] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Volume detach. 
Driver type: vmdk {{(pid=62585) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1215.053719] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294088', 'volume_id': '431f3188-05ad-40af-bfb8-5ff4ef92774f', 'name': 'volume-431f3188-05ad-40af-bfb8-5ff4ef92774f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '64a25430-b7bb-474a-aee2-a7cb801f335c', 'attached_at': '', 'detached_at': '', 'volume_id': '431f3188-05ad-40af-bfb8-5ff4ef92774f', 'serial': '431f3188-05ad-40af-bfb8-5ff4ef92774f'} {{(pid=62585) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1215.054616] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3675912-9b85-4684-825b-596254d03efa {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.078125] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2384b3-10d8-4dba-ab15-bf74f3f711c5 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.084958] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa6d297-23a8-4979-9b73-c44f4bbbec2e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.106640] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c81ea6-1079-4a15-b451-46a2932b6157 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.122426] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] The volume has not been displaced from its original location: [datastore1] volume-431f3188-05ad-40af-bfb8-5ff4ef92774f/volume-431f3188-05ad-40af-bfb8-5ff4ef92774f.vmdk. No consolidation needed. 
{{(pid=62585) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1215.127451] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Reconfiguring VM instance instance-0000006b to detach disk 2001 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1215.127707] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16a5a7ac-9dd8-4316-9241-37ab0f92db00 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.144159] env[62585]: DEBUG oslo_vmware.api [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1215.144159] env[62585]: value = "task-1385250" [ 1215.144159] env[62585]: _type = "Task" [ 1215.144159] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.151172] env[62585]: DEBUG oslo_vmware.api [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385250, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.653576] env[62585]: DEBUG oslo_vmware.api [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385250, 'name': ReconfigVM_Task, 'duration_secs': 0.210865} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.653814] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Reconfigured VM instance instance-0000006b to detach disk 2001 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1215.658354] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48503c72-b374-4400-bd38-768207675d78 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.673034] env[62585]: DEBUG oslo_vmware.api [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1215.673034] env[62585]: value = "task-1385251" [ 1215.673034] env[62585]: _type = "Task" [ 1215.673034] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.681243] env[62585]: DEBUG oslo_vmware.api [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385251, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.183335] env[62585]: DEBUG oslo_vmware.api [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385251, 'name': ReconfigVM_Task, 'duration_secs': 0.118831} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.183672] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294088', 'volume_id': '431f3188-05ad-40af-bfb8-5ff4ef92774f', 'name': 'volume-431f3188-05ad-40af-bfb8-5ff4ef92774f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '64a25430-b7bb-474a-aee2-a7cb801f335c', 'attached_at': '', 'detached_at': '', 'volume_id': '431f3188-05ad-40af-bfb8-5ff4ef92774f', 'serial': '431f3188-05ad-40af-bfb8-5ff4ef92774f'} {{(pid=62585) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1216.724997] env[62585]: DEBUG nova.objects.instance [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lazy-loading 'flavor' on Instance uuid 64a25430-b7bb-474a-aee2-a7cb801f335c {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1217.732757] env[62585]: DEBUG oslo_concurrency.lockutils [None req-aadadff1-b13c-46f7-a61b-07ca70eaf937 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.212s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.746379] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "64a25430-b7bb-474a-aee2-a7cb801f335c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.746622] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1218.249490] env[62585]: INFO nova.compute.manager [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Detaching volume 9d961fba-19dd-4b69-83f5-35d1264a25d9 [ 1218.280220] env[62585]: INFO nova.virt.block_device [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 
64a25430-b7bb-474a-aee2-a7cb801f335c] Attempting to driver detach volume 9d961fba-19dd-4b69-83f5-35d1264a25d9 from mountpoint /dev/sdc [ 1218.280461] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Volume detach. Driver type: vmdk {{(pid=62585) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1218.280669] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294089', 'volume_id': '9d961fba-19dd-4b69-83f5-35d1264a25d9', 'name': 'volume-9d961fba-19dd-4b69-83f5-35d1264a25d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '64a25430-b7bb-474a-aee2-a7cb801f335c', 'attached_at': '', 'detached_at': '', 'volume_id': '9d961fba-19dd-4b69-83f5-35d1264a25d9', 'serial': '9d961fba-19dd-4b69-83f5-35d1264a25d9'} {{(pid=62585) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1218.281547] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408dca11-17cd-4f8e-bc66-f793132a2ab8 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.302456] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f236c4-1455-4923-805b-b94031185a3a {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.308930] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e45adc1-addb-47fe-b10f-3ad4d3223dfc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.329151] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5166522-2390-4c2e-95bc-5f1ec346fbcf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.342911] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] The volume has not been displaced from its original location: [datastore1] volume-9d961fba-19dd-4b69-83f5-35d1264a25d9/volume-9d961fba-19dd-4b69-83f5-35d1264a25d9.vmdk. No consolidation needed. 
{{(pid=62585) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1218.347948] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Reconfiguring VM instance instance-0000006b to detach disk 2002 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1218.348217] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5237387b-70e0-4f64-867d-de29a099b0b3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.364657] env[62585]: DEBUG oslo_vmware.api [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1218.364657] env[62585]: value = "task-1385252" [ 1218.364657] env[62585]: _type = "Task" [ 1218.364657] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.371648] env[62585]: DEBUG oslo_vmware.api [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385252, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.873967] env[62585]: DEBUG oslo_vmware.api [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385252, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.375182] env[62585]: DEBUG oslo_vmware.api [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385252, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.875143] env[62585]: DEBUG oslo_vmware.api [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385252, 'name': ReconfigVM_Task, 'duration_secs': 1.214055} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.875569] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Reconfigured VM instance instance-0000006b to detach disk 2002 {{(pid=62585) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1219.879863] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8805c2f3-b6c4-4346-bc7d-bcc7877c69d0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.894071] env[62585]: DEBUG oslo_vmware.api [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1219.894071] env[62585]: value = "task-1385253" [ 1219.894071] env[62585]: _type = "Task" [ 1219.894071] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.901171] env[62585]: DEBUG oslo_vmware.api [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385253, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.403341] env[62585]: DEBUG oslo_vmware.api [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385253, 'name': ReconfigVM_Task, 'duration_secs': 0.12773} completed successfully. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.403641] env[62585]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294089', 'volume_id': '9d961fba-19dd-4b69-83f5-35d1264a25d9', 'name': 'volume-9d961fba-19dd-4b69-83f5-35d1264a25d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '64a25430-b7bb-474a-aee2-a7cb801f335c', 'attached_at': '', 'detached_at': '', 'volume_id': '9d961fba-19dd-4b69-83f5-35d1264a25d9', 'serial': '9d961fba-19dd-4b69-83f5-35d1264a25d9'} {{(pid=62585) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1220.943259] env[62585]: DEBUG nova.objects.instance [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lazy-loading 'flavor' on Instance uuid 64a25430-b7bb-474a-aee2-a7cb801f335c {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1221.950680] env[62585]: DEBUG oslo_concurrency.lockutils [None req-bc864cee-e832-40f9-a5ac-6c8e7296e3a3 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.204s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.130622] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "64a25430-b7bb-474a-aee2-a7cb801f335c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.130979] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.131119] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "64a25430-b7bb-474a-aee2-a7cb801f335c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.131322] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62585) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.131496] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.133793] env[62585]: INFO nova.compute.manager [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Terminating instance [ 1223.135572] env[62585]: DEBUG nova.compute.manager [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Start destroying the instance on the hypervisor. {{(pid=62585) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1223.135770] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Destroying instance {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1223.136592] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59900092-688f-49e4-a229-1782a655d1b0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.144596] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Powering off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1223.144839] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e4260c4f-6c80-4b3f-8bfa-b2b4eb557528 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.151986] env[62585]: DEBUG oslo_vmware.api [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1223.151986] env[62585]: value = "task-1385254" [ 1223.151986] env[62585]: _type = "Task" [ 1223.151986] env[62585]: } to complete. {{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.160962] env[62585]: DEBUG oslo_vmware.api [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385254, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.661989] env[62585]: DEBUG oslo_vmware.api [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385254, 'name': PowerOffVM_Task, 'duration_secs': 0.17379} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.662290] env[62585]: DEBUG nova.virt.vmwareapi.vm_util [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Powered off the VM {{(pid=62585) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1223.662484] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Unregistering the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1223.662726] env[62585]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57913660-c403-42cf-a1c0-7200fec2323d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.726572] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Unregistered the VM {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1223.726784] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Deleting contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1223.726967] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Deleting the datastore file [datastore1] 64a25430-b7bb-474a-aee2-a7cb801f335c {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1223.727242] env[62585]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60101f75-e609-46b8-aec4-ba9f861c2adf {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.733460] env[62585]: DEBUG oslo_vmware.api [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for the task: (returnval){ [ 1223.733460] env[62585]: value = "task-1385256" [ 1223.733460] env[62585]: _type = "Task" [ 1223.733460] env[62585]: } to complete. 
{{(pid=62585) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.740689] env[62585]: DEBUG oslo_vmware.api [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385256, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.243349] env[62585]: DEBUG oslo_vmware.api [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Task: {'id': task-1385256, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141517} completed successfully. {{(pid=62585) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.243734] env[62585]: DEBUG nova.virt.vmwareapi.ds_util [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Deleted the datastore file {{(pid=62585) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1224.243783] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Deleted contents of the VM from datastore datastore1 {{(pid=62585) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1224.244034] env[62585]: DEBUG nova.virt.vmwareapi.vmops [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Instance destroyed {{(pid=62585) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1224.244157] env[62585]: INFO nova.compute.manager [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1224.244422] env[62585]: DEBUG oslo.service.loopingcall [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62585) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1224.244622] env[62585]: DEBUG nova.compute.manager [-] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Deallocating network for instance {{(pid=62585) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1224.244717] env[62585]: DEBUG nova.network.neutron [-] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] deallocate_for_instance() {{(pid=62585) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1224.670016] env[62585]: DEBUG nova.compute.manager [req-56c75346-936a-4960-bfb3-e5226416d70b req-4c59ccda-4c50-46b9-a309-293d962488b2 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Received event network-vif-deleted-5d5de77a-cccd-423b-8d1c-7b96f243f6e9 {{(pid=62585) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1224.670447] env[62585]: INFO nova.compute.manager [req-56c75346-936a-4960-bfb3-e5226416d70b req-4c59ccda-4c50-46b9-a309-293d962488b2 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Neutron deleted interface 5d5de77a-cccd-423b-8d1c-7b96f243f6e9; detaching it from the instance and deleting it from the info cache [ 1224.670447] env[62585]: DEBUG nova.network.neutron [req-56c75346-936a-4960-bfb3-e5226416d70b req-4c59ccda-4c50-46b9-a309-293d962488b2 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.150336] env[62585]: DEBUG nova.network.neutron [-] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Updating instance_info_cache with network_info: [] {{(pid=62585) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.174470] env[62585]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7348f4fb-bd40-4ff1-a115-8954a24c93b9 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.182697] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee65596-1978-4361-b393-750b2a409564 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.206359] env[62585]: DEBUG nova.compute.manager [req-56c75346-936a-4960-bfb3-e5226416d70b req-4c59ccda-4c50-46b9-a309-293d962488b2 service nova] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Detach interface failed, port_id=5d5de77a-cccd-423b-8d1c-7b96f243f6e9, reason: Instance 64a25430-b7bb-474a-aee2-a7cb801f335c could not be found. {{(pid=62585) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1225.654122] env[62585]: INFO nova.compute.manager [-] [instance: 64a25430-b7bb-474a-aee2-a7cb801f335c] Took 1.41 seconds to deallocate network for instance. 
[ 1226.160228] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.160548] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.160788] env[62585]: DEBUG nova.objects.instance [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lazy-loading 'resources' on Instance uuid 64a25430-b7bb-474a-aee2-a7cb801f335c {{(pid=62585) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1226.698975] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8336b713-4467-4c2e-aea1-3c29fbbe7447 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.706608] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20dbc013-fd1c-498d-aff2-cd243ff49f3c {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.736209] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec32cce-3b10-4cf4-9171-0c9916f7e7d3 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.742671] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c342ddd-f902-47c2-a156-a8f4e9cd3fe1 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.754969] env[62585]: DEBUG nova.compute.provider_tree [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1227.258072] env[62585]: DEBUG nova.scheduler.client.report [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1227.763502] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 
tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.603s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.784360] env[62585]: INFO nova.scheduler.client.report [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Deleted allocations for instance 64a25430-b7bb-474a-aee2-a7cb801f335c [ 1228.292446] env[62585]: DEBUG oslo_concurrency.lockutils [None req-95b41f6c-bfc2-48fa-b6aa-8ad55bbda050 tempest-AttachVolumeTestJSON-2107358992 tempest-AttachVolumeTestJSON-2107358992-project-member] Lock "64a25430-b7bb-474a-aee2-a7cb801f335c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.161s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1257.360838] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.361308] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.361503] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Starting heal instance info cache {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1257.361683] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Rebuilding the list of instances to heal {{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1257.864831] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Didn't find any instances for network info cache update. 
{{(pid=62585) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1257.865149] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.865388] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.865600] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.865834] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.866069] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.866282] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.866458] env[62585]: DEBUG nova.compute.manager [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62585) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1257.866653] env[62585]: DEBUG oslo_service.periodic_task [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62585) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1258.369822] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.370333] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.370333] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.370564] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62585) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1258.371312] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29717ecc-7e14-41ca-9b9d-a0530898db0e {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.379654] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2245c152-5c8c-46c9-8988-4b243adb992d {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.392613] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed20f4aa-b3a4-4731-ab3f-7ae1522a3ae7 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.399432] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b309435-cd54-4878-88b3-e5545ff5abd0 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.428196] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181156MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=62585) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1258.428391] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1258.428514] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.449352] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1259.449599] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62585) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1259.462364] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5970fcb-d427-49f7-9c5a-51404c5d8574 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.469770] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521b5df1-d978-43e5-a237-4de7f7e81265 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.499213] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef7c9d0c-3d3a-4b40-8afe-32a8590f7ccc {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.506330] env[62585]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae92dad-706e-43e1-aa70-1feb8e8a0996 {{(pid=62585) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.519092] env[62585]: DEBUG nova.compute.provider_tree [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed in ProviderTree for provider: 66db9ec1-b5c3-45d2-a885-8e338110656b {{(pid=62585) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1260.022593] env[62585]: DEBUG nova.scheduler.client.report [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Inventory has not changed for provider 66db9ec1-b5c3-45d2-a885-8e338110656b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62585) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1260.528365] env[62585]: DEBUG nova.compute.resource_tracker [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62585) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1260.528848] env[62585]: DEBUG oslo_concurrency.lockutils [None req-90df9084-3a7b-4fb7-bfa0-ba65878f3060 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.100s {{(pid=62585) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}